/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "common/common-target.h"
#include "diagnostic.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "target-globals.h"
#include "tree-ssa-address.h"
#include "cfgexpand.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static void move_by_pieces_1 (insn_gen_fn, machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
                                        HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        enum machine_mode, tree, alias_set_type, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
                                                              const_tree);
static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */
init_expr_target (void)
  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */
      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
            if (! HARD_REGNO_MODE_OK (regno, mode))
              SET_REGNO (reg, regno);

              SET_DEST (pat) = reg;
              if (recog (pat, insn, &num_clobbers) >= 0)
                direct_load[(int) mode] = 1;

              SET_SRC (pat) = mem1;
              SET_DEST (pat) = reg;
              if (recog (pat, insn, &num_clobbers) >= 0)
                direct_load[(int) mode] = 1;

              SET_DEST (pat) = mem;
              if (recog (pat, insn, &num_clobbers) >= 0)
                direct_store[(int) mode] = 1;

              SET_DEST (pat) = mem1;
              if (recog (pat, insn, &num_clobbers) >= 0)
                direct_store[(int) mode] = 1;

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
/* This is run at the start of compiling a function.  */

  memset (&crtl->expr, 0, sizeof (crtl->expr));
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
convert_move (rtx to, rtx from, int unsignedp)
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_PRECISION (to_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
      emit_move_insn (to, from);

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))

      /* Try converting directly if the insn is supported.  */
      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
      insns = get_insns ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */

  if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      if (convert_optab_handler (ctab, to_mode, from_mode)
          != CODE_FOR_nothing)
          emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
                          to, from, UNKNOWN);

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
          emit_unop_insn (icode, to, from, UNKNOWN);

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
        expand_fixed_convert (to, from, 0, 1);
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
          rtx word_to = gen_reg_rtx (word_mode);

          if (reg_overlap_mentioned_p (to, from))
            from = force_reg (from_mode, from);

          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target so force it into an
         isolated register when maybe so.  Likewise for any MEM input, since
         the conversion sequence might require several references to it and
         we must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
        fill_value = const0_rtx;
        fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
                                            LT, lowfrom, const0_rtx,
                                            lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);

      insns = get_insns ();
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
          emit_unop_insn (code, to, from, equiv_code);

          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          emit_move_insn (to, tmp);
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
  return convert_modes (mode, VOIDmode, x, unsignedp);
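
/* Usage note (illustrative, not part of the original sources): given a
   QImode pseudo REG, convert_to_mode (SImode, REG, 1) hands back an rtx
   for the zero-extended SImode value, emitting whatever extension
   sequence convert_move selects for the target.  */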
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x,
               int unsignedp)
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
      /* If the caller did not tell us the old mode, then there is not
         much to do with respect to canonicalization.  We have to
         assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
        oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (std::make_pair (x, oldmode),
                                   GET_MODE_PRECISION (mode),
                                   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_CLASS (oldmode) == MODE_INT
      && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
          || (REG_P (x)
              && (!HARD_REGISTER_P (x)
                  || HARD_REGNO_MODE_OK (REGNO (x), mode))
              && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))

    return gen_lowpart (mode, x);

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);

      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
/* Return the widest integer mode no wider than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
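
/* Illustrative note: on a target whose integer modes are QI/HI/SI/DImode
   (1/2/4/8 bytes), a call with size == 5 selects SImode, since the loop
   keeps the widest mode strictly narrower than SIZE; callers therefore
   pass their byte limit plus one (e.g. MOVE_MAX_PIECES + 1 above).  */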
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
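
/* Worked example (hypothetical host/target): with a 64-bit HOST_WIDE_INT,
   2 * sizeof (HOST_WIDE_INT) is 16, so STORE_MAX_PIECES is simply
   MOVE_MAX_PIECES on any target whose MOVE_MAX_PIECES is 16 or less;
   the immediate-constant limit only bites for wider pieces.  */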
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
                    unsigned int align ATTRIBUTE_UNUSED)
  return MOVE_BY_PIECES_P (len, align);
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode;
  enum machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.from_addr = from_addr;
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);

      to_addr_mode = VOIDmode;
#ifdef STACK_GROWS_DOWNWARD
  data.to_addr = to_addr;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr_mode,
                                                            from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr_mode,
                                                          to_addr, len));
          data.explicit_inc_to = -1;
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data.len > 0)
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

      gcc_assert (!data.reverse);
          if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
            emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
            data.to_addr = copy_to_mode_reg (to_addr_mode,
                                             plus_constant (to_addr_mode,
                                                            data.to_addr, -1));
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        to1 = adjust_address (data.to, QImode, data.offset);
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
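
/* Worked example (hypothetical 32-bit target, fully aligned operands):
   for l == 7 and max_size == MOVE_MAX_PIECES + 1 == 5, the loop counts
   one SImode move (7 / 4), one HImode move (3 / 2) and one QImode move,
   so move_by_pieces_ninsns returns 3.  */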
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
                  struct move_by_pieces_d *data)
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
        data->offset -= size;

          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
            to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->from_addr))));

        emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
        emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->from_addr))));

      if (! data->reverse)
        data->offset += size;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move
   MAX_SIZE is the maximal size of block to move; if it cannot be represented
   in unsigned HOST_WIDE_INT, then it is a mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size,
                       unsigned HOST_WIDE_INT min_size,
                       unsigned HOST_WIDE_INT max_size,
                       unsigned HOST_WIDE_INT probable_max_size)
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)

    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size,
                                       min_size, max_size, probable_max_size))
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
        mark_addressable (y_expr);
        mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
                                min, max, max);
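
/* Example (illustrative only): copying a 32-byte BLKmode temporary DST
   from SRC would be requested as
     emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);
   which forwards min = max = 32 to emit_block_move_hints and lets it
   pick between move_by_pieces, a movmem pattern and a memcpy libcall.  */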
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

block_move_libcall_safe_for_call_parm (void)
#if defined (REG_PARM_STACK_SPACE)

  /* If arguments are pushed on the stack, then they're safe.  */

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
    fn = emit_block_move_libcall_fn (false);
    /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
       depend on its argument.  */
    if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
        && REG_PARM_STACK_SPACE (fn) != 0)

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align,
                            HOST_WIDE_INT expected_size,
                            unsigned HOST_WIDE_INT min_size,
                            unsigned HOST_WIDE_INT max_size,
                            unsigned HOST_WIDE_INT probable_max_size)
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
        expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
        expected_size = min_size;

  /* Since this is a move insn, we don't care about volatility.  */

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
          struct expand_operand ops[9];

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
              create_integer_operand (&ops[6], min_size);
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
                create_fixed_operand (&ops[7], NULL);
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
                create_fixed_operand (&ops[8], NULL);
          if (maybe_expand_insn (code, nops, ops))
              volatile_ok = save_volatile_ok;

  volatile_ok = save_volatile_ok;
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */
  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

init_block_move_fn (const char *asmspec)
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

    set_user_assembler_name (block_move_fn, asmspec);

emit_block_move_libcall_fn (int for_call)
  static bool emitted_extern;

    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
      emitted_extern = true;
      make_decl_rtl (block_move_fn);

  return block_move_fn;
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode = get_address_mode (x);
  enum machine_mode y_addr_mode = get_address_mode (y);
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label, REG_BR_PROB_BASE * 90 / 100);
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_from_reg (int regno, rtx x, int nregs)
  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      rtx_insn *last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

gen_group_rtx (rtx orig)
  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  for (; i < length; i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
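
/* Illustrative shape of such a group (hypothetical two-register return
   value):
     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])
   gen_group_rtx returns the same structure with the hard registers
   replaced by freshly allocated DImode pseudos.  */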
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize);
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
            src = gen_reg_rtx (GET_MODE (orig_src));
          emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
      else if (GET_CODE (src) == CONCAT)
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
          int slen = GET_MODE_SIZE (GET_MODE (src));
          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

emit_group_load (rtx dst, rtx src, tree type, int ssize)
  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
        emit_move_insn (d, tmps[i]);
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

emit_group_move (rtx dst, rtx src)
  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
/* Move a group of registers represented by a PARALLEL into pseudos.  */

emit_group_move_into_temps (rtx src)
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));

  for (i = 0; i < XVECLEN (src, 0); i++)
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
1904 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1905 where SRC is non-consecutive registers represented by a PARALLEL.
1906 SSIZE represents the total size of block ORIG_DST, or -1 if not
1910 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1913 int start
, finish
, i
;
1914 enum machine_mode m
= GET_MODE (orig_dst
);
1916 gcc_assert (GET_CODE (src
) == PARALLEL
);
1918 if (!SCALAR_INT_MODE_P (m
)
1919 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
1921 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_dst
));
1922 if (imode
== BLKmode
)
1923 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
);
1925 dst
= gen_reg_rtx (imode
);
1926 emit_group_store (dst
, src
, type
, ssize
);
1927 if (imode
!= BLKmode
)
1928 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
1929 emit_move_insn (orig_dst
, dst
);
1933 /* Check for a NULL entry, used to indicate that the parameter goes
1934 both on the stack and in registers. */
1935 if (XEXP (XVECEXP (src
, 0, 0), 0))
1939 finish
= XVECLEN (src
, 0);
1941 tmps
= XALLOCAVEC (rtx
, finish
);
1943 /* Copy the (probable) hard regs into pseudos. */
1944 for (i
= start
; i
< finish
; i
++)
1946 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1947 if (!REG_P (reg
) || REGNO (reg
) < FIRST_PSEUDO_REGISTER
)
1949 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1950 emit_move_insn (tmps
[i
], reg
);
1956 /* If we won't be storing directly into memory, protect the real destination
1957 from strange tricks we might play. */
1959 if (GET_CODE (dst
) == PARALLEL
)
1963 /* We can get a PARALLEL dst if there is a conditional expression in
1964 a return statement. In that case, the dst and src are the same,
1965 so no action is necessary. */
1966 if (rtx_equal_p (dst
, src
))
1969 /* It is unclear if we can ever reach here, but we may as well handle
1970 it. Allocate a temporary, and split this into a store/load to/from
1972 temp
= assign_stack_temp (GET_MODE (dst
), ssize
);
1973 emit_group_store (temp
, src
, type
, ssize
);
1974 emit_group_load (dst
, temp
, type
, ssize
);
1977 else if (!MEM_P (dst
) && GET_CODE (dst
) != CONCAT
)
1979 enum machine_mode outer
= GET_MODE (dst
);
1980 enum machine_mode inner
;
1981 HOST_WIDE_INT bytepos
;
1985 if (!REG_P (dst
) || REGNO (dst
) < FIRST_PSEUDO_REGISTER
)
1986 dst
= gen_reg_rtx (outer
);
1988 /* Make life a bit easier for combine. */
1989 /* If the first element of the vector is the low part
1990 of the destination mode, use a paradoxical subreg to
1991 initialize the destination. */
1994 inner
= GET_MODE (tmps
[start
]);
1995 bytepos
= subreg_lowpart_offset (inner
, outer
);
1996 if (INTVAL (XEXP (XVECEXP (src
, 0, start
), 1)) == bytepos
)
1998 temp
= simplify_gen_subreg (outer
, tmps
[start
],
2002 emit_move_insn (dst
, temp
);
2009 /* If the first element wasn't the low part, try the last. */
2011 && start
< finish
- 1)
2013 inner
= GET_MODE (tmps
[finish
- 1]);
2014 bytepos
= subreg_lowpart_offset (inner
, outer
);
2015 if (INTVAL (XEXP (XVECEXP (src
, 0, finish
- 1), 1)) == bytepos
)
2017 temp
= simplify_gen_subreg (outer
, tmps
[finish
- 1],
2021 emit_move_insn (dst
, temp
);
2028 /* Otherwise, simply initialize the result to zero. */
2030 emit_move_insn (dst
, CONST0_RTX (outer
));
  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;
      else
	adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode));
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode));
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}
      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      shift, tmps[i], 0);
	    }

	  /* Make sure not to write past the end of the struct.  */
	  store_bit_field (dest,
			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
			   VOIDmode, tmps[i]);
	}

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
	       && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	       && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	       && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 0, 0, mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  enum machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
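
/* Illustrative sketch, not part of the original file: a caller holding a
   value that may still be in PARALLEL form (for instance an aggregate
   return value spread over several registers) can normalize it with

     rtx val = whatever_the_caller_expanded;
     val = maybe_emit_group_store (val, type);

   after which VAL is a single pseudo whenever the input was a PARALLEL,
   and is returned unchanged otherwise.  TYPE is assumed to be the tree
   type of the value; the variable names are hypothetical.  */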
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode mode = GET_MODE (srcreg);
  enum machine_mode tmode = GET_MODE (target);
  enum machine_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
	   && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
	   && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
	   && GET_MODE (target) == mode
	   && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }
  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
	copy_mode = mem_mode;
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
	dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, copy_mode, copy_mode));
    }
}
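
/* Illustrative sketch, not part of the original file: call-expansion code
   typically uses copy_blkmode_from_reg to move an aggregate return value
   out of the return register(s), along the lines of

     rtx target = assign_temp (rettype, 1, 1);
     copy_blkmode_from_reg (target, valreg, rettype);

   where VALREG stands for whatever hard register (or register group) the
   target's return-value hook handed back; the names here are only
   placeholders for the caller's own variables.  */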
/* Copy BLKmode value SRC into a register of mode MODE.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (enum machine_mode mode, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  enum machine_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					   * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
	 on a word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	{
	  /* Generate an appropriate register.  */
	  dst_word = gen_reg_rtx (word_mode);
	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;

	  /* Clear the destination before we move anything into it.  */
	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
	}

      /* We need a new source operand each time bitpos is on a word
	 boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
	 xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
		       0, 0, word_mode,
		       extract_bit_field (src_word, bitsize,
					  bitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
	 entire structure.  */
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	/* Have we found a large enough mode?  */
	if (GET_MODE_SIZE (mode) >= bytes)
	  break;

      /* A suitable mode should have been found.  */
      gcc_assert (mode != VOIDmode);
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

#ifdef HAVE_conditional_move
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
#endif
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (! (memsetp
	 ? SET_BY_PIECES_P (len, align)
	 : STORE_BY_PIECES_P (len, align)))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
	{
	  mode = widest_int_mode_for_size (max_size);

	  if (mode == VOIDmode)
	    break;

	  icode = optab_handler (mov_optab, mode);
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!targetm.legitimate_constant_p (mode, cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
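
/* Illustrative sketch, not part of the original file: the usual pattern is
   to query can_store_by_pieces first and only then commit to
   store_by_pieces with the same callback.  The memset expansion in
   builtins.c does roughly

     if (can_store_by_pieces (len, builtin_memset_read_str,
			      &c, align, true))
       store_by_pieces (dest_mem, len, builtin_memset_read_str,
			&c, align, true, 0);

   where C holds the fill byte and the callback returns a constant of the
   requested mode built from it; the surrounding control flow here is only
   a sketch.  */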
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  enum machine_mode to_addr_mode = get_address_mode (to);
  struct store_by_pieces_d data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (memsetp
	      ? SET_BY_PIECES_P (len, align)
	      : STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (to_addr_mode,
								data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
		   unsigned int align ATTRIBUTE_UNUSED)
{
  enum machine_mode to_addr_mode = get_address_mode (data->to);
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.
	 MODE might not be used depending on the definitions of the
	 USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
	= widest_int_mode_for_size (max_size);

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_to_mode_reg (to_addr_mode,
					    plus_constant (to_addr_mode,
							   to_addr,
							   data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data->len > 0)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
		   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  gen_int_mode (-(HOST_WIDE_INT) size,
						GET_MODE (data->to_addr))));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  gen_int_mode (size,
						GET_MODE (data->to_addr))));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
		     unsigned int expected_align, HOST_WIDE_INT expected_size,
		     unsigned HOST_WIDE_INT min_size,
		     unsigned HOST_WIDE_INT max_size,
		     unsigned HOST_WIDE_INT probable_max_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
				   expected_align, expected_size,
				   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
				    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}
rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
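
/* Illustrative sketch, not part of the original file: a typical use of
   clear_storage is zeroing a freshly allocated BLKmode temporary, e.g.

     rtx tmp = assign_stack_temp (BLKmode, size);
     clear_storage (tmp, GEN_INT (size), BLOCK_OP_NORMAL);

   with SIZE the byte count the caller computed; the hint-taking entry
   point above is used instead when expected size and alignment
   information is available.  */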
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into new pseudos into a VAR_DECL and use them later.  */

  object = copy_addr_to_reg (XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
			unsigned int expected_align, HOST_WIDE_INT expected_size,
			unsigned HOST_WIDE_INT min_size,
			unsigned HOST_WIDE_INT max_size,
			unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT) expected_size > max_size)
	expected_size = max_size;
      if ((unsigned HOST_WIDE_INT) expected_size < min_size)
	expected_size = min_size;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || max_size <= (GET_MODE_MASK (mode) >> 1)
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  struct expand_operand ops[9];
	  unsigned int nops;

	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

	  create_fixed_operand (&ops[0], object);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[1], size, mode, true);
	  create_convert_operand_from (&ops[2], val, byte_mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (nops >= 8)
	    {
	      create_integer_operand (&ops[6], min_size);
	      /* If we can not represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) max_size != -1)
		create_integer_operand (&ops[7], max_size);
	      else
		create_fixed_operand (&ops[7], NULL);
	    }
	  if (nops == 9)
	    {
	      /* If we can not represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) probable_max_size != -1)
		create_integer_operand (&ops[8], probable_max_size);
	      else
		create_fixed_operand (&ops[8], NULL);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    return true;
	}
    }

  return false;
}
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
	{
	  emit_move_insn (part, val);
	  return;
	}
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

static rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
	return ret;
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, NULL_RTX, imode, imode);
}
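
/* Illustrative sketch, not part of the original file: the two helpers above
   are normally used as a pair.  For instance, a complex conjugate could be
   expanded roughly as

     enum machine_mode imode = GET_MODE_INNER (mode);
     rtx re = read_complex_part (op, false);
     rtx im = read_complex_part (op, true);
     write_complex_part (target, re, false);
     write_complex_part (target, expand_unop (imode, neg_optab, im,
					      NULL_RTX, 0), true);

   assuming TARGET and OP share the complex mode MODE; the variable names
   are placeholders for the caller's operands.  */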
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
		       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
	val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
			      0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;

    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;

    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
		  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			 read_complex_part (y, !imag_first));
}

/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
	   && HARD_REGISTER_P (x)
	   && hard_regno_nregs[REGNO (x)][mode] == 1)
      && !(REG_P (y)
	   && HARD_REGISTER_P (y)
	   && hard_regno_nregs[REGNO (y)][mode] == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	   && (!STRICT_ALIGNMENT
	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx_insn *ret;

      /* For memory to memory moves, optimal behavior can be had with the
	 existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
	{
	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
			   BLOCK_OP_NO_LIBCALL);
	  return get_last_insn ();
	}

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
	return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
			- GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;

  return false;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx_insn *
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx_insn *last_insn = 0;
  rtx_insn *seq;
  rtx inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
	 from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
	continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
	 constant.  Otherwise, force it into a register.  Then we must
	 be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
	{
	  y = use_anchored_address (force_const_mem (mode, y));
	  ypart = operand_subword (y, i, 1, mode);
	}
      else if (ypart == 0)
	ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
	return result;
      else
	return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
	{
	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
	    return ret;
	}
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx_insn *
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx_insn *last_insn;
  rtx set;

  gcc_assert (mode != BLKmode
	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	  else
	    y = use_anchored_address (y);
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					 MEM_ADDR_SPACE (x))
	  && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
					MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
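
/* Illustrative sketch, not part of the original file: the common expansion
   idiom is simply

     rtx tmp = gen_reg_rtx (GET_MODE (src));
     emit_move_insn (tmp, src);

   and emit_move_insn then dispatches to one of the emit_move_* helpers
   above depending on the mode and on which move patterns the target
   provides.  SRC here stands for any rtx of a non-BLK mode.  */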
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx_insn *
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y;
      rtx_insn *last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (!insn_operand_matches (ic, 1, trunc_y))
	    continue;
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
				  speed);
	  if (oldcost < newcost)
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	{
	  trunc_y = force_const_mem (srcmode, trunc_y);
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
				  speed);
	  if (oldcost < newcost)
	    continue;
	  trunc_y = validize_mem (trunc_y);
	}
      else
	continue;

      /* For CSE's benefit, force the compressed constant pool entry
	 into a new pseudo.  This constant may be used in different modes,
	 and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
	 so that e.g. stack realignment code is aware of it.  */
      rtx target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
	target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target != x)
	return emit_move_insn (x, target);
      return last_insn;
    }

  return NULL;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp,
			     gen_int_mode (extra, Pmode),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (Pmode, size,
							       extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
	return XEXP (addr, 0);
    }
  return NULL;
}
/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is INT_MIN.  */

HOST_WIDE_INT
find_args_size_adjust (rtx_insn *insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
	 of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
	return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
	 The call itself is always first, and the stack adjust is
	 usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;
	}
      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
	return 0;
      /* Fall through to process the extracted SET and DEST
	 as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
	 and a store for a PUSH_ROUNDING pattern, rather than a
	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;

	  /* We do not expect an auto-inc of the sp in the parallel.  */
	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      if (i < 0)
	return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
	 the stack pointer in V4SImode.  Consider that non-trivial.  */
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
	return INTVAL (XEXP (SET_SRC (set), 1));
      /* ??? Reload can generate no-op moves, which will be cleaned
	 up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
	return 0;
      else
	return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
	{
	  mem = dest;
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
	mem = SET_SRC (set);
      else
	return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
	{
	case PRE_INC:
	case POST_INC:
	  return GET_MODE_SIZE (GET_MODE (mem));
	case PRE_DEC:
	case POST_DEC:
	  return -GET_MODE_SIZE (GET_MODE (mem));
	case PRE_MODIFY:
	case POST_MODIFY:
	  addr = XEXP (addr, 1);
	  gcc_assert (GET_CODE (addr) == PLUS);
	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
	  return INTVAL (XEXP (addr, 1));
	default:
	  gcc_unreachable ();
	}
    }
}
int
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
	{
	  if (!CALL_P (insn)
	      || ACCUMULATE_OUTGOING_ARGS
	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
	    continue;
	}

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
	saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
#ifdef STACK_GROWS_DOWNWARD
      this_delta = -(unsigned HOST_WIDE_INT) this_delta;
#endif
      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
	return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    gen_int_mode (rounded_size, Pmode),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				gen_int_mode (offset, Pmode));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				gen_int_mode (-(HOST_WIDE_INT) rounded_size,
					      Pmode));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				gen_int_mode (rounded_size, Pmode));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
4085 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
4087 int delta
, old_delta
= stack_pointer_delta
;
4088 rtx_insn
*prev
= get_last_insn ();
4091 emit_single_push_insn_1 (mode
, x
, type
);
4093 last
= get_last_insn ();
4095 /* Notice the common case where we emitted exactly one insn. */
4096 if (PREV_INSN (last
) == prev
)
4098 add_reg_note (last
, REG_ARGS_SIZE
, GEN_INT (stack_pointer_delta
));
4102 delta
= fixup_args_size_notes (prev
, last
, stack_pointer_delta
);
4103 gcc_assert (delta
== INT_MIN
|| delta
== old_delta
);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */
4175 offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
4176 used
= partial
- offset
;
4178 if (mode
!= BLKmode
)
4180 /* A value is to be stored in an insufficiently aligned
4181 stack slot; copy via a suitably aligned slot if
4183 size
= GEN_INT (GET_MODE_SIZE (mode
));
4184 if (!MEM_P (xinner
))
4186 temp
= assign_temp (type
, 1, 1);
4187 emit_move_insn (temp
, xinner
);
4194 /* USED is now the # of bytes we need not copy to the stack
4195 because registers will take care of them. */
4198 xinner
= adjust_address (xinner
, BLKmode
, used
);
4200 /* If the partial register-part of the arg counts in its stack size,
4201 skip the part of stack space corresponding to the registers.
4202 Otherwise, start copying to the beginning of the stack space,
4203 by setting SKIP to 0. */
4204 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
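      /* Worked example (illustrative, not from the original sources): with
	 a hypothetical PARM_BOUNDARY of 64 bits and PARTIAL == 12 bytes
	 already passed in registers,
	   offset = 12 % (64 / 8) = 4   bytes at the tail of the register
					part that do not fill a whole
					PARM_BOUNDARY slot and so are still
					copied to the stack, and
	   used   = 12 - 4        = 8   bytes that registers fully cover and
					that need not be copied at all.
	 SKIP is then USED when the register part also counts against the
	 argument's stack size (reg_parm_stack_space != 0), else 0.  */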
4206 #ifdef PUSH_ROUNDING
4207 /* Do it with several push insns if that doesn't take lots of insns
4208 and if there is no difficulty with push insns that skip bytes
4209 on the stack for alignment purposes. */
4212 && CONST_INT_P (size
)
4214 && MEM_ALIGN (xinner
) >= align
4215 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
4216 /* Here we avoid the case of a structure whose weak alignment
4217 forces many pushes of a small amount of data,
4218 and such small pushes do rounding that causes trouble. */
4219 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
4220 || align
>= BIGGEST_ALIGNMENT
4221 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
4222 == (align
/ BITS_PER_UNIT
)))
4223 && (HOST_WIDE_INT
) PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
4225 /* Push padding now if padding above and stack grows down,
4226 or if padding below and stack grows up.
4227 But if space already allocated, this has already been done. */
4228 if (extra
&& args_addr
== 0
4229 && where_pad
!= none
&& where_pad
!= stack_direction
)
4230 anti_adjust_stack (GEN_INT (extra
));
4232 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
4235 #endif /* PUSH_ROUNDING */
4239 /* Otherwise make space on the stack and copy the data
4240 to the address of that space. */
4242 /* Deduct words put into registers from the size we must copy. */
4245 if (CONST_INT_P (size
))
4246 size
= GEN_INT (INTVAL (size
) - used
);
4248 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
4249 gen_int_mode (used
, GET_MODE (size
)),
4250 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4253 /* Get the address of the stack space.
4254 In this case, we do not deal with EXTRA separately.
4255 A single stack adjust will do. */
4258 temp
= push_block (size
, extra
, where_pad
== downward
);
4261 else if (CONST_INT_P (args_so_far
))
4262 temp
= memory_address (BLKmode
,
4263 plus_constant (Pmode
, args_addr
,
4264 skip
+ INTVAL (args_so_far
)));
4266 temp
= memory_address (BLKmode
,
4267 plus_constant (Pmode
,
4268 gen_rtx_PLUS (Pmode
,
4273 if (!ACCUMULATE_OUTGOING_ARGS
)
4275 /* If the source is referenced relative to the stack pointer,
4276 copy it to another register to stabilize it. We do not need
4277 to do this if we know that we won't be changing sp. */
4279 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
4280 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
4281 temp
= copy_to_reg (temp
);
4284 target
= gen_rtx_MEM (BLKmode
, temp
);
4286 /* We do *not* set_mem_attributes here, because incoming arguments
4287 may overlap with sibling call outgoing arguments and we cannot
4288 allow reordering of reads from function arguments with stores
4289 to outgoing arguments of sibling calls. We do, however, want
4290 to record the alignment of the stack slot. */
4291 /* ALIGN may well be better aligned than TYPE, e.g. due to
4292 PARM_BOUNDARY. Assume the caller isn't lying. */
4293 set_mem_align (target
, align
);
4295 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
4298 else if (partial
> 0)
4300 /* Scalar partly in registers. */
4302 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
4305 /* # bytes of start of argument
4306 that we must make space for but need not store. */
4307 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
4308 int args_offset
= INTVAL (args_so_far
);
4311 /* Push padding now if padding above and stack grows down,
4312 or if padding below and stack grows up.
4313 But if space already allocated, this has already been done. */
4314 if (extra
&& args_addr
== 0
4315 && where_pad
!= none
&& where_pad
!= stack_direction
)
4316 anti_adjust_stack (GEN_INT (extra
));
4318 /* If we make space by pushing it, we might as well push
4319 the real data. Otherwise, we can leave OFFSET nonzero
4320 and leave the space uninitialized. */
4324 /* Now NOT_STACK gets the number of words that we don't need to
4325 allocate on the stack. Convert OFFSET to words too. */
4326 not_stack
= (partial
- offset
) / UNITS_PER_WORD
;
4327 offset
/= UNITS_PER_WORD
;
4329 /* If the partial register-part of the arg counts in its stack size,
4330 skip the part of stack space corresponding to the registers.
4331 Otherwise, start copying to the beginning of the stack space,
4332 by setting SKIP to 0. */
4333 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
4335 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
4336 x
= validize_mem (force_const_mem (mode
, x
));
4338 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4339 SUBREGs of such registers are not allowed. */
4340 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
4341 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
4342 x
= copy_to_reg (x
);
4344 /* Loop over all the words allocated on the stack for this arg. */
4345 /* We can do it by words, because any scalar bigger than a word
4346 has a size a multiple of a word. */
4347 for (i
= size
- 1; i
>= not_stack
; i
--)
4348 if (i
>= not_stack
+ offset
)
4349 emit_push_insn (operand_subword_force (x
, i
, mode
),
4350 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
4352 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
4354 reg_parm_stack_space
, alignment_pad
);
4361 /* Push padding now if padding above and stack grows down,
4362 or if padding below and stack grows up.
4363 But if space already allocated, this has already been done. */
4364 if (extra
&& args_addr
== 0
4365 && where_pad
!= none
&& where_pad
!= stack_direction
)
4366 anti_adjust_stack (GEN_INT (extra
));
4368 #ifdef PUSH_ROUNDING
4369 if (args_addr
== 0 && PUSH_ARGS
)
4370 emit_single_push_insn (mode
, x
, type
);
4374 if (CONST_INT_P (args_so_far
))
4376 = memory_address (mode
,
4377 plus_constant (Pmode
, args_addr
,
4378 INTVAL (args_so_far
)));
4380 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
4382 dest
= gen_rtx_MEM (mode
, addr
);
4384 /* We do *not* set_mem_attributes here, because incoming arguments
4385 may overlap with sibling call outgoing arguments and we cannot
4386 allow reordering of reads from function arguments with stores
4387 to outgoing arguments of sibling calls. We do, however, want
4388 to record the alignment of the stack slot. */
4389 /* ALIGN may well be better aligned than TYPE, e.g. due to
4390 PARM_BOUNDARY. Assume the caller isn't lying. */
4391 set_mem_align (dest
, align
);
4393 emit_move_insn (dest
, x
);
4397 /* If part should go in registers, copy that part
4398 into the appropriate registers. Do this now, at the end,
4399 since mem-to-mem copies above may do function calls. */
4400 if (partial
> 0 && reg
!= 0)
4402 /* Handle calls that pass values in multiple non-contiguous locations.
4403 The Irix 6 ABI has examples of this. */
4404 if (GET_CODE (reg
) == PARALLEL
)
4405 emit_group_load (reg
, x
, type
, -1);
4408 gcc_assert (partial
% UNITS_PER_WORD
== 0);
4409 move_block_to_reg (REGNO (reg
), x
, partial
/ UNITS_PER_WORD
, mode
);
4413 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
4414 anti_adjust_stack (GEN_INT (extra
));
4416 if (alignment_pad
&& args_addr
== 0)
4417 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	   /* Only registers can be subtargets.  */
	   || !REG_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */
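/* Illustrative example (not from the original sources) of the kind of
   source statement this targets; the struct and field names are
   hypothetical.  */
#if 0
struct flags { unsigned int ready : 1; unsigned int count : 7; };

static void
example_bitfield_update (struct flags *p)
{
  /* A 1-bit field combined with ^= (or the topmost field with +=/|=) can
     be rewritten as a single logical or arithmetic operation on the word
     containing the field, instead of an extract, operate and re-insert
     sequence.  */
  p->ready ^= 1;
}
#endif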
4440 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize
,
4441 unsigned HOST_WIDE_INT bitpos
,
4442 unsigned HOST_WIDE_INT bitregion_start
,
4443 unsigned HOST_WIDE_INT bitregion_end
,
4444 enum machine_mode mode1
, rtx str_rtx
,
4447 enum machine_mode str_mode
= GET_MODE (str_rtx
);
4448 unsigned int str_bitsize
= GET_MODE_BITSIZE (str_mode
);
4453 enum tree_code code
;
4455 if (mode1
!= VOIDmode
4456 || bitsize
>= BITS_PER_WORD
4457 || str_bitsize
> BITS_PER_WORD
4458 || TREE_SIDE_EFFECTS (to
)
4459 || TREE_THIS_VOLATILE (to
))
4463 if (TREE_CODE (src
) != SSA_NAME
)
4465 if (TREE_CODE (TREE_TYPE (src
)) != INTEGER_TYPE
)
4468 srcstmt
= get_gimple_for_ssa_name (src
);
4470 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt
)) != tcc_binary
)
4473 code
= gimple_assign_rhs_code (srcstmt
);
4475 op0
= gimple_assign_rhs1 (srcstmt
);
4477 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4478 to find its initialization. Hopefully the initialization will
4479 be from a bitfield load. */
4480 if (TREE_CODE (op0
) == SSA_NAME
)
4482 gimple op0stmt
= get_gimple_for_ssa_name (op0
);
4484 /* We want to eventually have OP0 be the same as TO, which
4485 should be a bitfield. */
4487 || !is_gimple_assign (op0stmt
)
4488 || gimple_assign_rhs_code (op0stmt
) != TREE_CODE (to
))
4490 op0
= gimple_assign_rhs1 (op0stmt
);
4493 op1
= gimple_assign_rhs2 (srcstmt
);
4495 if (!operand_equal_p (to
, op0
, 0))
4498 if (MEM_P (str_rtx
))
4500 unsigned HOST_WIDE_INT offset1
;
4502 if (str_bitsize
== 0 || str_bitsize
> BITS_PER_WORD
)
4503 str_mode
= word_mode
;
4504 str_mode
= get_best_mode (bitsize
, bitpos
,
4505 bitregion_start
, bitregion_end
,
4506 MEM_ALIGN (str_rtx
), str_mode
, 0);
4507 if (str_mode
== VOIDmode
)
4509 str_bitsize
= GET_MODE_BITSIZE (str_mode
);
4512 bitpos
%= str_bitsize
;
4513 offset1
= (offset1
- bitpos
) / BITS_PER_UNIT
;
4514 str_rtx
= adjust_address (str_rtx
, str_mode
, offset1
);
4516 else if (!REG_P (str_rtx
) && GET_CODE (str_rtx
) != SUBREG
)
4519 /* If the bit field covers the whole REG/MEM, store_field
4520 will likely generate better code. */
4521 if (bitsize
>= str_bitsize
)
4524 /* We can't handle fields split across multiple entities. */
4525 if (bitpos
+ bitsize
> str_bitsize
)
4528 if (BYTES_BIG_ENDIAN
)
4529 bitpos
= str_bitsize
- bitpos
- bitsize
;
4535 /* For now, just optimize the case of the topmost bitfield
4536 where we don't need to do any masking and also
4537 1 bit bitfields where xor can be used.
4538 We might win by one instruction for the other bitfields
4539 too if insv/extv instructions aren't used, so that
4540 can be added later. */
4541 if (bitpos
+ bitsize
!= str_bitsize
4542 && (bitsize
!= 1 || TREE_CODE (op1
) != INTEGER_CST
))
4545 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4546 value
= convert_modes (str_mode
,
4547 TYPE_MODE (TREE_TYPE (op1
)), value
,
4548 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4550 /* We may be accessing data outside the field, which means
4551 we can alias adjacent data. */
4552 if (MEM_P (str_rtx
))
4554 str_rtx
= shallow_copy_rtx (str_rtx
);
4555 set_mem_alias_set (str_rtx
, 0);
4556 set_mem_expr (str_rtx
, 0);
4559 binop
= code
== PLUS_EXPR
? add_optab
: sub_optab
;
4560 if (bitsize
== 1 && bitpos
+ bitsize
!= str_bitsize
)
4562 value
= expand_and (str_mode
, value
, const1_rtx
, NULL
);
4565 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
, bitpos
, NULL_RTX
, 1);
4566 result
= expand_binop (str_mode
, binop
, str_rtx
,
4567 value
, str_rtx
, 1, OPTAB_WIDEN
);
4568 if (result
!= str_rtx
)
4569 emit_move_insn (str_rtx
, result
);
4574 if (TREE_CODE (op1
) != INTEGER_CST
)
4576 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4577 value
= convert_modes (str_mode
,
4578 TYPE_MODE (TREE_TYPE (op1
)), value
,
4579 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4581 /* We may be accessing data outside the field, which means
4582 we can alias adjacent data. */
4583 if (MEM_P (str_rtx
))
4585 str_rtx
= shallow_copy_rtx (str_rtx
);
4586 set_mem_alias_set (str_rtx
, 0);
4587 set_mem_expr (str_rtx
, 0);
4590 binop
= code
== BIT_IOR_EXPR
? ior_optab
: xor_optab
;
4591 if (bitpos
+ bitsize
!= str_bitsize
)
4593 rtx mask
= gen_int_mode (((unsigned HOST_WIDE_INT
) 1 << bitsize
) - 1,
4595 value
= expand_and (str_mode
, value
, mask
, NULL_RTX
);
4597 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
, bitpos
, NULL_RTX
, 1);
4598 result
= expand_binop (str_mode
, binop
, str_rtx
,
4599 value
, str_rtx
, 1, OPTAB_WIDEN
);
4600 if (result
!= str_rtx
)
4601 emit_move_insn (str_rtx
, result
);
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */
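/* Illustrative example (not from the original sources); the struct and
   field names are hypothetical.  */
#if 0
struct example_bits
{
  char a;
  int b : 7;	/* B and C share one DECL_BIT_FIELD_REPRESENTATIVE, so a   */
  int c : 9;	/* store to either may touch that whole representative but */
  char d;	/* must not touch A or D; get_bit_range returns its span.   */
};
#endif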
4623 get_bit_range (unsigned HOST_WIDE_INT
*bitstart
,
4624 unsigned HOST_WIDE_INT
*bitend
,
4626 HOST_WIDE_INT
*bitpos
,
4629 HOST_WIDE_INT bitoffset
;
4632 gcc_assert (TREE_CODE (exp
) == COMPONENT_REF
);
4634 field
= TREE_OPERAND (exp
, 1);
4635 repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
4636 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4637 need to limit the range we can access. */
4640 *bitstart
= *bitend
= 0;
4644 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4645 part of a larger bit field, then the representative does not serve any
4646 useful purpose. This can occur in Ada. */
4647 if (handled_component_p (TREE_OPERAND (exp
, 0)))
4649 enum machine_mode rmode
;
4650 HOST_WIDE_INT rbitsize
, rbitpos
;
4654 get_inner_reference (TREE_OPERAND (exp
, 0), &rbitsize
, &rbitpos
,
4655 &roffset
, &rmode
, &unsignedp
, &volatilep
, false);
4656 if ((rbitpos
% BITS_PER_UNIT
) != 0)
4658 *bitstart
= *bitend
= 0;
4663 /* Compute the adjustment to bitpos from the offset of the field
4664 relative to the representative. DECL_FIELD_OFFSET of field and
4665 repr are the same by construction if they are not constants,
4666 see finish_bitfield_layout. */
4667 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field
))
4668 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr
)))
4669 bitoffset
= (tree_to_uhwi (DECL_FIELD_OFFSET (field
))
4670 - tree_to_uhwi (DECL_FIELD_OFFSET (repr
))) * BITS_PER_UNIT
;
4673 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
4674 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
4676 /* If the adjustment is larger than bitpos, we would have a negative bit
4677 position for the lower bound and this may wreak havoc later. Adjust
4678 offset and bitpos to make the lower bound non-negative in that case. */
4679 if (bitoffset
> *bitpos
)
4681 HOST_WIDE_INT adjust
= bitoffset
- *bitpos
;
4682 gcc_assert ((adjust
% BITS_PER_UNIT
) == 0);
4685 if (*offset
== NULL_TREE
)
4686 *offset
= size_int (-adjust
/ BITS_PER_UNIT
);
4689 = size_binop (MINUS_EXPR
, *offset
, size_int (adjust
/ BITS_PER_UNIT
));
4693 *bitstart
= *bitpos
- bitoffset
;
4695 *bitend
= *bitstart
+ tree_to_uhwi (DECL_SIZE (repr
)) - 1;
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */
4735 expand_assignment (tree to
, tree from
, bool nontemporal
)
4739 enum machine_mode mode
;
4741 enum insn_code icode
;
4743 /* Don't crash if the lhs of the assignment was erroneous. */
4744 if (TREE_CODE (to
) == ERROR_MARK
)
4746 expand_normal (from
);
4750 /* Optimize away no-op moves without side-effects. */
4751 if (operand_equal_p (to
, from
, 0))
4754 /* Handle misaligned stores. */
4755 mode
= TYPE_MODE (TREE_TYPE (to
));
4756 if ((TREE_CODE (to
) == MEM_REF
4757 || TREE_CODE (to
) == TARGET_MEM_REF
)
4759 && !mem_ref_refers_to_non_mem_p (to
)
4760 && ((align
= get_object_alignment (to
))
4761 < GET_MODE_ALIGNMENT (mode
))
4762 && (((icode
= optab_handler (movmisalign_optab
, mode
))
4763 != CODE_FOR_nothing
)
4764 || SLOW_UNALIGNED_ACCESS (mode
, align
)))
4768 reg
= expand_expr (from
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4769 reg
= force_not_mem (reg
);
4770 mem
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4772 if (icode
!= CODE_FOR_nothing
)
4774 struct expand_operand ops
[2];
4776 create_fixed_operand (&ops
[0], mem
);
4777 create_input_operand (&ops
[1], reg
, mode
);
4778 /* The movmisalign<mode> pattern cannot fail, else the assignment
4779 would silently be omitted. */
4780 expand_insn (icode
, 2, ops
);
4783 store_bit_field (mem
, GET_MODE_BITSIZE (mode
), 0, 0, 0, mode
, reg
);
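      /* Illustrative example (not from the original sources) of a store
	 that takes this misaligned path: assigning to an int member of a
	 packed struct, whose address is only byte-aligned.  The type and
	 names below are hypothetical.

	     struct __attribute__ ((packed)) wire { char tag; int value; };
	     void set (struct wire *w, int v) { w->value = v; }

	 On strict-alignment targets the assignment is expanded via the
	 movmisalign pattern when one is available, and otherwise via
	 store_bit_field as done just above.  */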
4787 /* Assignment of a structure component needs special treatment
4788 if the structure component's rtx is not simply a MEM.
4789 Assignment of an array element at a constant index, and assignment of
4790 an array element in an unaligned packed structure field, has the same
4791 problem. Same for (partially) storing into a non-memory object. */
4792 if (handled_component_p (to
)
4793 || (TREE_CODE (to
) == MEM_REF
4794 && mem_ref_refers_to_non_mem_p (to
))
4795 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
4797 enum machine_mode mode1
;
4798 HOST_WIDE_INT bitsize
, bitpos
;
4799 unsigned HOST_WIDE_INT bitregion_start
= 0;
4800 unsigned HOST_WIDE_INT bitregion_end
= 0;
4807 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
4808 &unsignedp
, &volatilep
, true);
4810 /* Make sure bitpos is not negative, it can wreak havoc later. */
4813 gcc_assert (offset
== NULL_TREE
);
4814 offset
= size_int (bitpos
>> (BITS_PER_UNIT
== 8
4815 ? 3 : exact_log2 (BITS_PER_UNIT
)));
4816 bitpos
&= BITS_PER_UNIT
- 1;
4819 if (TREE_CODE (to
) == COMPONENT_REF
4820 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to
, 1)))
4821 get_bit_range (&bitregion_start
, &bitregion_end
, to
, &bitpos
, &offset
);
4822 /* The C++ memory model naturally applies to byte-aligned fields.
4823 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4824 BITSIZE are not byte-aligned, there is no need to limit the range
4825 we can access. This can occur with packed structures in Ada. */
4826 else if (bitsize
> 0
4827 && bitsize
% BITS_PER_UNIT
== 0
4828 && bitpos
% BITS_PER_UNIT
== 0)
4830 bitregion_start
= bitpos
;
4831 bitregion_end
= bitpos
+ bitsize
- 1;
4834 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4836 /* If the field has a mode, we want to access it in the
4837 field's mode, not the computed mode.
4838 If a MEM has VOIDmode (external with incomplete type),
4839 use BLKmode for it instead. */
4842 if (mode1
!= VOIDmode
)
4843 to_rtx
= adjust_address (to_rtx
, mode1
, 0);
4844 else if (GET_MODE (to_rtx
) == VOIDmode
)
4845 to_rtx
= adjust_address (to_rtx
, BLKmode
, 0);
4850 enum machine_mode address_mode
;
4853 if (!MEM_P (to_rtx
))
4855 /* We can get constant negative offsets into arrays with broken
4856 user code. Translate this to a trap instead of ICEing. */
4857 gcc_assert (TREE_CODE (offset
) == INTEGER_CST
);
4858 expand_builtin_trap ();
4859 to_rtx
= gen_rtx_MEM (BLKmode
, const0_rtx
);
4862 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4863 address_mode
= get_address_mode (to_rtx
);
4864 if (GET_MODE (offset_rtx
) != address_mode
)
4865 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
4867 /* If we have an expression in OFFSET_RTX and a non-zero
4868 byte offset in BITPOS, adding the byte offset before the
4869 OFFSET_RTX results in better intermediate code, which makes
4870 later rtl optimization passes perform better.
4872 We prefer intermediate code like this:
4874 r124:DI=r123:DI+0x18
4879 r124:DI=r123:DI+0x10
4880 [r124:DI+0x8]=r121:DI
4882 This is only done for aligned data values, as these can
4883 be expected to result in single move instructions. */
4884 if (mode1
!= VOIDmode
4887 && (bitpos
% bitsize
) == 0
4888 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4889 && MEM_ALIGN (to_rtx
) >= GET_MODE_ALIGNMENT (mode1
))
4891 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4892 bitregion_start
= 0;
4893 if (bitregion_end
>= (unsigned HOST_WIDE_INT
) bitpos
)
4894 bitregion_end
-= bitpos
;
4898 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4899 highest_pow2_factor_for_target (to
,
4903 /* No action is needed if the target is not a memory and the field
4904 lies completely outside that target. This can occur if the source
4905 code contains an out-of-bounds access to a small array. */
4907 && GET_MODE (to_rtx
) != BLKmode
4908 && (unsigned HOST_WIDE_INT
) bitpos
4909 >= GET_MODE_PRECISION (GET_MODE (to_rtx
)))
4911 expand_normal (from
);
4914 /* Handle expand_expr of a complex value returning a CONCAT. */
4915 else if (GET_CODE (to_rtx
) == CONCAT
)
4917 unsigned short mode_bitsize
= GET_MODE_BITSIZE (GET_MODE (to_rtx
));
4918 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from
)))
4920 && bitsize
== mode_bitsize
)
4921 result
= store_expr (from
, to_rtx
, false, nontemporal
);
4922 else if (bitsize
== mode_bitsize
/ 2
4923 && (bitpos
== 0 || bitpos
== mode_bitsize
/ 2))
4924 result
= store_expr (from
, XEXP (to_rtx
, bitpos
!= 0), false,
4926 else if (bitpos
+ bitsize
<= mode_bitsize
/ 2)
4927 result
= store_field (XEXP (to_rtx
, 0), bitsize
, bitpos
,
4928 bitregion_start
, bitregion_end
,
4930 get_alias_set (to
), nontemporal
);
4931 else if (bitpos
>= mode_bitsize
/ 2)
4932 result
= store_field (XEXP (to_rtx
, 1), bitsize
,
4933 bitpos
- mode_bitsize
/ 2,
4934 bitregion_start
, bitregion_end
,
4936 get_alias_set (to
), nontemporal
);
4937 else if (bitpos
== 0 && bitsize
== mode_bitsize
)
4940 result
= expand_normal (from
);
4941 from_rtx
= simplify_gen_subreg (GET_MODE (to_rtx
), result
,
4942 TYPE_MODE (TREE_TYPE (from
)), 0);
4943 emit_move_insn (XEXP (to_rtx
, 0),
4944 read_complex_part (from_rtx
, false));
4945 emit_move_insn (XEXP (to_rtx
, 1),
4946 read_complex_part (from_rtx
, true));
4950 rtx temp
= assign_stack_temp (GET_MODE (to_rtx
),
4951 GET_MODE_SIZE (GET_MODE (to_rtx
)));
4952 write_complex_part (temp
, XEXP (to_rtx
, 0), false);
4953 write_complex_part (temp
, XEXP (to_rtx
, 1), true);
4954 result
= store_field (temp
, bitsize
, bitpos
,
4955 bitregion_start
, bitregion_end
,
4957 get_alias_set (to
), nontemporal
);
4958 emit_move_insn (XEXP (to_rtx
, 0), read_complex_part (temp
, false));
4959 emit_move_insn (XEXP (to_rtx
, 1), read_complex_part (temp
, true));
4966 /* If the field is at offset zero, we could have been given the
4967 DECL_RTX of the parent struct. Don't munge it. */
4968 to_rtx
= shallow_copy_rtx (to_rtx
);
4969 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
4971 MEM_VOLATILE_P (to_rtx
) = 1;
4974 if (optimize_bitfield_assignment_op (bitsize
, bitpos
,
4975 bitregion_start
, bitregion_end
,
4980 result
= store_field (to_rtx
, bitsize
, bitpos
,
4981 bitregion_start
, bitregion_end
,
4983 get_alias_set (to
), nontemporal
);
4987 preserve_temp_slots (result
);
4992 /* If the rhs is a function call and its value is not an aggregate,
4993 call the function before we start to compute the lhs.
4994 This is needed for correct code for cases such as
4995 val = setjmp (buf) on machines where reference to val
4996 requires loading up part of an address in a separate insn.
4998 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4999 since it might be a promoted variable where the zero- or sign- extension
5000 needs to be done. Handling this in the normal way is safe because no
5001 computation is done before the call. The same is true for SSA names. */
5002 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
5003 && COMPLETE_TYPE_P (TREE_TYPE (from
))
5004 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
5005 && ! (((TREE_CODE (to
) == VAR_DECL
5006 || TREE_CODE (to
) == PARM_DECL
5007 || TREE_CODE (to
) == RESULT_DECL
)
5008 && REG_P (DECL_RTL (to
)))
5009 || TREE_CODE (to
) == SSA_NAME
))
5014 value
= expand_normal (from
);
5016 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5018 /* Handle calls that return values in multiple non-contiguous locations.
5019 The Irix 6 ABI has examples of this. */
5020 if (GET_CODE (to_rtx
) == PARALLEL
)
5022 if (GET_CODE (value
) == PARALLEL
)
5023 emit_group_move (to_rtx
, value
);
5025 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
5026 int_size_in_bytes (TREE_TYPE (from
)));
5028 else if (GET_CODE (value
) == PARALLEL
)
5029 emit_group_store (to_rtx
, value
, TREE_TYPE (from
),
5030 int_size_in_bytes (TREE_TYPE (from
)));
5031 else if (GET_MODE (to_rtx
) == BLKmode
)
5033 /* Handle calls that return BLKmode values in registers. */
5035 copy_blkmode_from_reg (to_rtx
, value
, TREE_TYPE (from
));
5037 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
5041 if (POINTER_TYPE_P (TREE_TYPE (to
)))
5042 value
= convert_memory_address_addr_space
5043 (GET_MODE (to_rtx
), value
,
5044 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to
))));
5046 emit_move_insn (to_rtx
, value
);
5048 preserve_temp_slots (to_rtx
);
5053 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5054 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5056 /* Don't move directly into a return register. */
5057 if (TREE_CODE (to
) == RESULT_DECL
5058 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
5064 /* If the source is itself a return value, it still is in a pseudo at
5065 this point so we can move it back to the return register directly. */
5067 && TYPE_MODE (TREE_TYPE (from
)) == BLKmode
5068 && TREE_CODE (from
) != CALL_EXPR
)
5069 temp
= copy_blkmode_to_reg (GET_MODE (to_rtx
), from
);
5071 temp
= expand_expr (from
, NULL_RTX
, GET_MODE (to_rtx
), EXPAND_NORMAL
);
5073 /* Handle calls that return values in multiple non-contiguous locations.
5074 The Irix 6 ABI has examples of this. */
5075 if (GET_CODE (to_rtx
) == PARALLEL
)
5077 if (GET_CODE (temp
) == PARALLEL
)
5078 emit_group_move (to_rtx
, temp
);
5080 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
5081 int_size_in_bytes (TREE_TYPE (from
)));
5084 emit_move_insn (to_rtx
, temp
);
5086 preserve_temp_slots (to_rtx
);
5091 /* In case we are returning the contents of an object which overlaps
5092 the place the value is being stored, use a safe function when copying
5093 a value through a pointer into a structure value return block. */
5094 if (TREE_CODE (to
) == RESULT_DECL
5095 && TREE_CODE (from
) == INDIRECT_REF
5096 && ADDR_SPACE_GENERIC_P
5097 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from
, 0)))))
5098 && refs_may_alias_p (to
, from
)
5099 && cfun
->returns_struct
5100 && !cfun
->returns_pcc_struct
)
5105 size
= expr_size (from
);
5106 from_rtx
= expand_normal (from
);
5108 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
5109 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
5110 XEXP (from_rtx
, 0), Pmode
,
5111 convert_to_mode (TYPE_MODE (sizetype
),
5112 size
, TYPE_UNSIGNED (sizetype
)),
5113 TYPE_MODE (sizetype
));
5115 preserve_temp_slots (to_rtx
);
5120 /* Compute FROM and store the value in the rtx we got. */
5123 result
= store_expr (from
, to_rtx
, 0, nontemporal
);
5124 preserve_temp_slots (result
);
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  enum machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
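/* Minimal usage sketch (illustrative, not from the original sources):
   callers such as store_expr below first try the nontemporal pattern and
   fall back to an ordinary move when the target has none.  */
#if 0
static void
example_store_maybe_nontemporal (rtx target, rtx temp, bool nontemporal)
{
  if (nontemporal && emit_storent_insn (target, temp))
    return;				/* nontemporal store emitted */
  emit_move_insn (target, temp);	/* ordinary store */
}
#endif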
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more careful?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.  */
5163 store_expr (tree exp
, rtx target
, int call_param_p
, bool nontemporal
)
5166 rtx alt_rtl
= NULL_RTX
;
5167 location_t loc
= curr_insn_location ();
5169 if (VOID_TYPE_P (TREE_TYPE (exp
)))
5171 /* C++ can generate ?: expressions with a throw expression in one
5172 branch and an rvalue in the other. Here, we resolve attempts to
5173 store the throw expression's nonexistent result. */
5174 gcc_assert (!call_param_p
);
5175 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5178 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
5180 /* Perform first part of compound expression, then assign from second
5182 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
5183 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5184 return store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
,
5187 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
5189 /* For conditional expression, get safe form of the target. Then
5190 test the condition, doing the appropriate assignment on either
5191 side. This avoids the creation of unnecessary temporaries.
5192 For non-BLKmode, it is more efficient not to do this. */
5194 rtx_code_label
*lab1
= gen_label_rtx (), *lab2
= gen_label_rtx ();
5196 do_pending_stack_adjust ();
5198 jumpifnot (TREE_OPERAND (exp
, 0), lab1
, -1);
5199 store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
,
5201 emit_jump_insn (gen_jump (lab2
));
5204 store_expr (TREE_OPERAND (exp
, 2), target
, call_param_p
,
5211 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
5212 /* If this is a scalar in a register that is stored in a wider mode
5213 than the declared mode, compute the result into its declared mode
5214 and then convert to the wider mode. Our value is the computed
5217 rtx inner_target
= 0;
5219 /* We can do the conversion inside EXP, which will often result
5220 in some optimizations. Do the conversion in two steps: first
5221 change the signedness, if needed, then the extend. But don't
5222 do this if the type of EXP is a subtype of something else
5223 since then the conversion might involve more than just
5224 converting modes. */
5225 if (INTEGRAL_TYPE_P (TREE_TYPE (exp
))
5226 && TREE_TYPE (TREE_TYPE (exp
)) == 0
5227 && GET_MODE_PRECISION (GET_MODE (target
))
5228 == TYPE_PRECISION (TREE_TYPE (exp
)))
5230 if (!SUBREG_CHECK_PROMOTED_SIGN (target
,
5231 TYPE_UNSIGNED (TREE_TYPE (exp
))))
5233 /* Some types, e.g. Fortran's logical*4, won't have a signed
5234 version, so use the mode instead. */
5236 = (signed_or_unsigned_type_for
5237 (SUBREG_PROMOTED_SIGN (target
), TREE_TYPE (exp
)));
5239 ntype
= lang_hooks
.types
.type_for_mode
5240 (TYPE_MODE (TREE_TYPE (exp
)),
5241 SUBREG_PROMOTED_SIGN (target
));
5243 exp
= fold_convert_loc (loc
, ntype
, exp
);
5246 exp
= fold_convert_loc (loc
, lang_hooks
.types
.type_for_mode
5247 (GET_MODE (SUBREG_REG (target
)),
5248 SUBREG_PROMOTED_SIGN (target
)),
5251 inner_target
= SUBREG_REG (target
);
5254 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
5255 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5257 /* If TEMP is a VOIDmode constant, use convert_modes to make
5258 sure that we properly convert it. */
5259 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
5261 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5262 temp
, SUBREG_PROMOTED_SIGN (target
));
5263 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
5264 GET_MODE (target
), temp
,
5265 SUBREG_PROMOTED_SIGN (target
));
5268 convert_move (SUBREG_REG (target
), temp
,
5269 SUBREG_PROMOTED_SIGN (target
));
5273 else if ((TREE_CODE (exp
) == STRING_CST
5274 || (TREE_CODE (exp
) == MEM_REF
5275 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5276 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5278 && integer_zerop (TREE_OPERAND (exp
, 1))))
5279 && !nontemporal
&& !call_param_p
5282 /* Optimize initialization of an array with a STRING_CST. */
5283 HOST_WIDE_INT exp_len
, str_copy_len
;
5285 tree str
= TREE_CODE (exp
) == STRING_CST
5286 ? exp
: TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5288 exp_len
= int_expr_size (exp
);
5292 if (TREE_STRING_LENGTH (str
) <= 0)
5295 str_copy_len
= strlen (TREE_STRING_POINTER (str
));
5296 if (str_copy_len
< TREE_STRING_LENGTH (str
) - 1)
5299 str_copy_len
= TREE_STRING_LENGTH (str
);
5300 if ((STORE_MAX_PIECES
& (STORE_MAX_PIECES
- 1)) == 0
5301 && TREE_STRING_POINTER (str
)[TREE_STRING_LENGTH (str
) - 1] == '\0')
5303 str_copy_len
+= STORE_MAX_PIECES
- 1;
5304 str_copy_len
&= ~(STORE_MAX_PIECES
- 1);
5306 str_copy_len
= MIN (str_copy_len
, exp_len
);
5307 if (!can_store_by_pieces (str_copy_len
, builtin_strncpy_read_str
,
5308 CONST_CAST (char *, TREE_STRING_POINTER (str
)),
5309 MEM_ALIGN (target
), false))
5314 dest_mem
= store_by_pieces (dest_mem
,
5315 str_copy_len
, builtin_strncpy_read_str
,
5317 TREE_STRING_POINTER (str
)),
5318 MEM_ALIGN (target
), false,
5319 exp_len
> str_copy_len
? 1 : 0);
5320 if (exp_len
> str_copy_len
)
5321 clear_storage (adjust_address (dest_mem
, BLKmode
, 0),
5322 GEN_INT (exp_len
- str_copy_len
),
5331 /* If we want to use a nontemporal store, force the value to
5333 tmp_target
= nontemporal
? NULL_RTX
: target
;
5334 temp
= expand_expr_real (exp
, tmp_target
, GET_MODE (target
),
5336 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
5340 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5341 the same as that of TARGET, adjust the constant. This is needed, for
5342 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5343 only a word-sized value. */
5344 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
5345 && TREE_CODE (exp
) != ERROR_MARK
5346 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
5347 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5348 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5350 /* If value was not generated in the target, store it there.
5351 Convert the value to TARGET's type first if necessary and emit the
5352 pending incrementations that have been queued when expanding EXP.
5353 Note that we cannot emit the whole queue blindly because this will
5354 effectively disable the POST_INC optimization later.
5356 If TEMP and TARGET compare equal according to rtx_equal_p, but
5357 one or both of them are volatile memory refs, we have to distinguish
5359 - expand_expr has used TARGET. In this case, we must not generate
5360 another copy. This can be detected by TARGET being equal according
5362 - expand_expr has not used TARGET - that means that the source just
5363 happens to have the same RTX form. Since temp will have been created
5364 by expand_expr, it will compare unequal according to == .
5365 We must generate a copy in this case, to reach the correct number
5366 of volatile memory references. */
5368 if ((! rtx_equal_p (temp
, target
)
5369 || (temp
!= target
&& (side_effects_p (temp
)
5370 || side_effects_p (target
))))
5371 && TREE_CODE (exp
) != ERROR_MARK
5372 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5373 but TARGET is not valid memory reference, TEMP will differ
5374 from TARGET although it is really the same location. */
5376 && rtx_equal_p (alt_rtl
, target
)
5377 && !side_effects_p (alt_rtl
)
5378 && !side_effects_p (target
))
5379 /* If there's nothing to copy, don't bother. Don't call
5380 expr_size unless necessary, because some front-ends (C++)
5381 expr_size-hook must not be given objects that are not
5382 supposed to be bit-copied or bit-initialized. */
5383 && expr_size (exp
) != const0_rtx
)
5385 if (GET_MODE (temp
) != GET_MODE (target
) && GET_MODE (temp
) != VOIDmode
)
5387 if (GET_MODE (target
) == BLKmode
)
5389 /* Handle calls that return BLKmode values in registers. */
5390 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
5391 copy_blkmode_from_reg (target
, temp
, TREE_TYPE (exp
));
5393 store_bit_field (target
,
5394 INTVAL (expr_size (exp
)) * BITS_PER_UNIT
,
5395 0, 0, 0, GET_MODE (temp
), temp
);
5398 convert_move (target
, temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5401 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
5403 /* Handle copying a string constant into an array. The string
5404 constant may be shorter than the array. So copy just the string's
5405 actual length, and clear the rest. First get the size of the data
5406 type of the string, which is actually the size of the target. */
5407 rtx size
= expr_size (exp
);
5409 if (CONST_INT_P (size
)
5410 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
5411 emit_block_move (target
, temp
, size
,
5413 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5416 enum machine_mode pointer_mode
5417 = targetm
.addr_space
.pointer_mode (MEM_ADDR_SPACE (target
));
5418 enum machine_mode address_mode
= get_address_mode (target
);
5420 /* Compute the size of the data to copy from the string. */
5422 = size_binop_loc (loc
, MIN_EXPR
,
5423 make_tree (sizetype
, size
),
5424 size_int (TREE_STRING_LENGTH (exp
)));
5426 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
5428 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
5429 rtx_code_label
*label
= 0;
5431 /* Copy that much. */
5432 copy_size_rtx
= convert_to_mode (pointer_mode
, copy_size_rtx
,
5433 TYPE_UNSIGNED (sizetype
));
5434 emit_block_move (target
, temp
, copy_size_rtx
,
5436 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5438 /* Figure out how much is left in TARGET that we have to clear.
5439 Do all calculations in pointer_mode. */
5440 if (CONST_INT_P (copy_size_rtx
))
5442 size
= plus_constant (address_mode
, size
,
5443 -INTVAL (copy_size_rtx
));
5444 target
= adjust_address (target
, BLKmode
,
5445 INTVAL (copy_size_rtx
));
5449 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
5450 copy_size_rtx
, NULL_RTX
, 0,
5453 if (GET_MODE (copy_size_rtx
) != address_mode
)
5454 copy_size_rtx
= convert_to_mode (address_mode
,
5456 TYPE_UNSIGNED (sizetype
));
5458 target
= offset_address (target
, copy_size_rtx
,
5459 highest_pow2_factor (copy_size
));
5460 label
= gen_label_rtx ();
5461 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
5462 GET_MODE (size
), 0, label
);
5465 if (size
!= const0_rtx
)
5466 clear_storage (target
, size
, BLOCK_OP_NORMAL
);
5472 /* Handle calls that return values in multiple non-contiguous locations.
5473 The Irix 6 ABI has examples of this. */
5474 else if (GET_CODE (target
) == PARALLEL
)
5476 if (GET_CODE (temp
) == PARALLEL
)
5477 emit_group_move (target
, temp
);
5479 emit_group_load (target
, temp
, TREE_TYPE (exp
),
5480 int_size_in_bytes (TREE_TYPE (exp
)));
5482 else if (GET_CODE (temp
) == PARALLEL
)
5483 emit_group_store (target
, temp
, TREE_TYPE (exp
),
5484 int_size_in_bytes (TREE_TYPE (exp
)));
5485 else if (GET_MODE (temp
) == BLKmode
)
5486 emit_block_move (target
, temp
, expr_size (exp
),
5488 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5489 /* If we emit a nontemporal store, there is nothing else to do. */
5490 else if (nontemporal
&& emit_storent_insn (target
, temp
))
5494 temp
= force_operand (temp
, target
);
5496 emit_move_insn (target
, temp
);
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
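/* Illustrative example (not from the original sources): the C99 flexible
   array member shape this predicate recognizes; the names are
   hypothetical.  */
#if 0
struct packet
{
  int len;
  unsigned char data[];		/* last field, incomplete array type */
};
#endif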
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;
5535 nelts
= array_type_nelts (type
);
5536 if (nelts
&& tree_fits_uhwi_p (nelts
))
5538 unsigned HOST_WIDE_INT n
;
5540 n
= tree_to_uhwi (nelts
) + 1;
5541 if (n
== 0 || for_ctor_p
)
5544 return n
* count_type_elements (TREE_TYPE (type
), false);
5546 return for_ctor_p
? -1 : 1;
5551 unsigned HOST_WIDE_INT n
;
5555 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5556 if (TREE_CODE (f
) == FIELD_DECL
)
5559 n
+= count_type_elements (TREE_TYPE (f
), false);
5560 else if (!flexible_array_member_p (f
, type
))
5561 /* Don't count flexible arrays, which are not supposed
5562 to be initialized. */
5570 case QUAL_UNION_TYPE
:
5575 gcc_assert (!for_ctor_p
);
5576 /* Estimate the number of scalars in each field and pick the
5577 maximum. Other estimates would do instead; the idea is simply
5578 to make sure that the estimate is not sensitive to the ordering
5581 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5582 if (TREE_CODE (f
) == FIELD_DECL
)
5584 m
= count_type_elements (TREE_TYPE (f
), false);
5585 /* If the field doesn't span the whole union, add an extra
5586 scalar for the rest. */
5587 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f
)),
5588 TYPE_SIZE (type
)) != 1)
5600 return TYPE_VECTOR_SUBPARTS (type
);
5604 case FIXED_POINT_TYPE
:
5609 case REFERENCE_TYPE
:
5625 /* Helper for categorize_ctor_elements. Identical interface. */
5628 categorize_ctor_elements_1 (const_tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
5629 HOST_WIDE_INT
*p_init_elts
, bool *p_complete
)
5631 unsigned HOST_WIDE_INT idx
;
5632 HOST_WIDE_INT nz_elts
, init_elts
, num_fields
;
5633 tree value
, purpose
, elt_type
;
5635 /* Whether CTOR is a valid constant initializer, in accordance with what
5636 initializer_constant_valid_p does. If inferred from the constructor
5637 elements, true until proven otherwise. */
5638 bool const_from_elts_p
= constructor_static_from_elts_p (ctor
);
5639 bool const_p
= const_from_elts_p
? true : TREE_STATIC (ctor
);
5644 elt_type
= NULL_TREE
;
5646 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), idx
, purpose
, value
)
5648 HOST_WIDE_INT mult
= 1;
5650 if (purpose
&& TREE_CODE (purpose
) == RANGE_EXPR
)
5652 tree lo_index
= TREE_OPERAND (purpose
, 0);
5653 tree hi_index
= TREE_OPERAND (purpose
, 1);
5655 if (tree_fits_uhwi_p (lo_index
) && tree_fits_uhwi_p (hi_index
))
5656 mult
= (tree_to_uhwi (hi_index
)
5657 - tree_to_uhwi (lo_index
) + 1);
5660 elt_type
= TREE_TYPE (value
);
5662 switch (TREE_CODE (value
))
5666 HOST_WIDE_INT nz
= 0, ic
= 0;
5668 bool const_elt_p
= categorize_ctor_elements_1 (value
, &nz
, &ic
,
5671 nz_elts
+= mult
* nz
;
5672 init_elts
+= mult
* ic
;
5674 if (const_from_elts_p
&& const_p
)
5675 const_p
= const_elt_p
;
5682 if (!initializer_zerop (value
))
5688 nz_elts
+= mult
* TREE_STRING_LENGTH (value
);
5689 init_elts
+= mult
* TREE_STRING_LENGTH (value
);
5693 if (!initializer_zerop (TREE_REALPART (value
)))
5695 if (!initializer_zerop (TREE_IMAGPART (value
)))
5703 for (i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
5705 tree v
= VECTOR_CST_ELT (value
, i
);
5706 if (!initializer_zerop (v
))
5715 HOST_WIDE_INT tc
= count_type_elements (elt_type
, false);
5716 nz_elts
+= mult
* tc
;
5717 init_elts
+= mult
* tc
;
5719 if (const_from_elts_p
&& const_p
)
5720 const_p
= initializer_constant_valid_p (value
, elt_type
)
5727 if (*p_complete
&& !complete_ctor_at_level_p (TREE_TYPE (ctor
),
5728 num_fields
, elt_type
))
5729 *p_complete
= false;
5731 *p_nz_elts
+= nz_elts
;
5732 *p_init_elts
+= init_elts
;
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
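/* Worked example (illustrative, not from the original sources): for

       int example_array[4] = { 0, 5, 0, 7 };

   this returns *P_NZ_ELTS == 2 (the nonzero scalars), *P_INIT_ELTS == 4
   (all initialized scalars) and *P_COMPLETE == true, and the function
   itself returns true since the initializer is a valid static constant.  */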
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}

/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
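/* Worked example (illustrative, not from the original sources): a
   constructor with 16 initialized scalars of which 3 are nonzero gives
   nz_elts == 3 < init_elts / 4 == 4, so mostly_zeros_p is true and
   store_constructor below will prefer to clear the whole object first and
   then store only the nonzero elements.  */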
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
5833 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
5834 HOST_WIDE_INT bitpos
, enum machine_mode mode
,
5835 tree exp
, int cleared
, alias_set_type alias_set
)
5837 if (TREE_CODE (exp
) == CONSTRUCTOR
5838 /* We can only call store_constructor recursively if the size and
5839 bit position are on a byte boundary. */
5840 && bitpos
% BITS_PER_UNIT
== 0
5841 && (bitsize
> 0 && bitsize
% BITS_PER_UNIT
== 0)
5842 /* If we have a nonzero bitpos for a register target, then we just
5843 let store_field do the bitfield handling. This is unlikely to
5844 generate unnecessary clear instructions anyways. */
5845 && (bitpos
== 0 || MEM_P (target
)))
5849 = adjust_address (target
,
5850 GET_MODE (target
) == BLKmode
5852 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
5853 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5856 /* Update the alias set, if required. */
5857 if (MEM_P (target
) && ! MEM_KEEP_ALIAS_SET_P (target
)
5858 && MEM_ALIAS_SET (target
) != 0)
5860 target
= copy_rtx (target
);
5861 set_mem_alias_set (target
, alias_set
);
5864 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
5867 store_field (target
, bitsize
, bitpos
, 0, 0, mode
, exp
, alias_set
, false);
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
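/* Illustrative example (not from the original sources): for

       struct buf { int data[8]; int len; } b = { .len = 1 };

   the constructor names fewer fields than the structure has, so the code
   below clears the whole slot first and then stores only LEN.  The type
   and field names are hypothetical.  */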
5896 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
5898 tree type
= TREE_TYPE (exp
);
5899 #ifdef WORD_REGISTER_OPERATIONS
5900 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
5903 switch (TREE_CODE (type
))
5907 case QUAL_UNION_TYPE
:
5909 unsigned HOST_WIDE_INT idx
;
5912 /* If size is zero or the target is already cleared, do nothing. */
5913 if (size
== 0 || cleared
)
5915 /* We either clear the aggregate or indicate the value is dead. */
5916 else if ((TREE_CODE (type
) == UNION_TYPE
5917 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
5918 && ! CONSTRUCTOR_ELTS (exp
))
5919 /* If the constructor is empty, clear the union. */
5921 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
5925 /* If we are building a static constructor into a register,
5926 set the initial value as zero so we can fold the value into
5927 a constant. But if more than one register is involved,
5928 this probably loses. */
5929 else if (REG_P (target
) && TREE_STATIC (exp
)
5930 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
5932 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5936 /* If the constructor has fewer fields than the structure or
5937 if we are initializing the structure to mostly zeros, clear
5938 the whole structure first. Don't do this if TARGET is a
5939 register whose mode size isn't equal to SIZE since
5940 clear_storage can't handle this case. */
5942 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp
))
5943 != fields_length (type
))
5944 || mostly_zeros_p (exp
))
5946 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
5949 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
5953 if (REG_P (target
) && !cleared
)
5954 emit_clobber (target
);
5956 /* Store each element of the constructor into the
5957 corresponding field of TARGET. */
5958 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, field
, value
)
5960 enum machine_mode mode
;
5961 HOST_WIDE_INT bitsize
;
5962 HOST_WIDE_INT bitpos
= 0;
5964 rtx to_rtx
= target
;
5966 /* Just ignore missing fields. We cleared the whole
5967 structure, above, if any fields are missing. */
5971 if (cleared
&& initializer_zerop (value
))
5974 if (tree_fits_uhwi_p (DECL_SIZE (field
)))
5975 bitsize
= tree_to_uhwi (DECL_SIZE (field
));
5979 mode
= DECL_MODE (field
);
5980 if (DECL_BIT_FIELD (field
))
5983 offset
= DECL_FIELD_OFFSET (field
);
5984 if (tree_fits_shwi_p (offset
)
5985 && tree_fits_shwi_p (bit_position (field
)))
5987 bitpos
= int_bit_position (field
);
5991 bitpos
= tree_to_shwi (DECL_FIELD_BIT_OFFSET (field
));
5995 enum machine_mode address_mode
;
5999 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
6000 make_tree (TREE_TYPE (exp
),
6003 offset_rtx
= expand_normal (offset
);
6004 gcc_assert (MEM_P (to_rtx
));
6006 address_mode
= get_address_mode (to_rtx
);
6007 if (GET_MODE (offset_rtx
) != address_mode
)
6008 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
6010 to_rtx
= offset_address (to_rtx
, offset_rtx
,
6011 highest_pow2_factor (offset
));
6014 #ifdef WORD_REGISTER_OPERATIONS
6015 /* If this initializes a field that is smaller than a
6016 word, at the start of a word, try to widen it to a full
6017 word. This special case allows us to output C++ member
6018 function initializations in a form that the optimizers
6021 && bitsize
< BITS_PER_WORD
6022 && bitpos
% BITS_PER_WORD
== 0
6023 && GET_MODE_CLASS (mode
) == MODE_INT
6024 && TREE_CODE (value
) == INTEGER_CST
6026 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
6028 tree type
= TREE_TYPE (value
);
6030 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
6032 type
= lang_hooks
.types
.type_for_mode
6033 (word_mode
, TYPE_UNSIGNED (type
));
6034 value
= fold_convert (type
, value
);
6037 if (BYTES_BIG_ENDIAN
)
6039 = fold_build2 (LSHIFT_EXPR
, type
, value
,
6040 build_int_cst (type
,
6041 BITS_PER_WORD
- bitsize
));
6042 bitsize
= BITS_PER_WORD
;
6047 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
6048 && DECL_NONADDRESSABLE_P (field
))
6050 to_rtx
= copy_rtx (to_rtx
);
6051 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
6054 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
6056 get_alias_set (TREE_TYPE (field
)));
6063 unsigned HOST_WIDE_INT i
;
6066 tree elttype
= TREE_TYPE (type
);
6068 HOST_WIDE_INT minelt
= 0;
6069 HOST_WIDE_INT maxelt
= 0;
6071 domain
= TYPE_DOMAIN (type
);
6072 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
6073 && TYPE_MAX_VALUE (domain
)
6074 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain
))
6075 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain
)));
6077 /* If we have constant bounds for the range of the type, get them. */
6080 minelt
= tree_to_shwi (TYPE_MIN_VALUE (domain
));
6081 maxelt
= tree_to_shwi (TYPE_MAX_VALUE (domain
));
6084 /* If the constructor has fewer elements than the array, clear
6085 the whole array first. Similarly if this is static
6086 constructor of a non-BLKmode object. */
6089 else if (REG_P (target
) && TREE_STATIC (exp
))
6093 unsigned HOST_WIDE_INT idx
;
6095 HOST_WIDE_INT count
= 0, zero_count
= 0;
6096 need_to_clear
= ! const_bounds_p
;
6098 /* This loop is a more accurate version of the loop in
6099 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6100 is also needed to check for missing elements. */
6101 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, index
, value
)
6103 HOST_WIDE_INT this_node_count
;
6108 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6110 tree lo_index
= TREE_OPERAND (index
, 0);
6111 tree hi_index
= TREE_OPERAND (index
, 1);
6113 if (! tree_fits_uhwi_p (lo_index
)
6114 || ! tree_fits_uhwi_p (hi_index
))
6120 this_node_count
= (tree_to_uhwi (hi_index
)
6121 - tree_to_uhwi (lo_index
) + 1);
6124 this_node_count
= 1;
6126 count
+= this_node_count
;
6127 if (mostly_zeros_p (value
))
6128 zero_count
+= this_node_count
;
6131 /* Clear the entire array first if there are any missing
6132 elements, or if the incidence of zero elements is >=
6135 && (count
< maxelt
- minelt
+ 1
6136 || 4 * zero_count
>= 3 * count
))
6140 if (need_to_clear
&& size
> 0)
6143 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6145 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6149 if (!cleared
&& REG_P (target
))
6150 /* Inform later passes that the old value is dead. */
6151 emit_clobber (target
);
6153 /* Store each element of the constructor into the
6154 corresponding element of TARGET, determined by counting the
6156 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), i
, index
, value
)
6158 enum machine_mode mode
;
6159 HOST_WIDE_INT bitsize
;
6160 HOST_WIDE_INT bitpos
;
6161 rtx xtarget
= target
;
6163 if (cleared
&& initializer_zerop (value
))
6166 mode
= TYPE_MODE (elttype
);
6167 if (mode
== BLKmode
)
6168 bitsize
= (tree_fits_uhwi_p (TYPE_SIZE (elttype
))
6169 ? tree_to_uhwi (TYPE_SIZE (elttype
))
6172 bitsize
= GET_MODE_BITSIZE (mode
);
6174 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6176 tree lo_index
= TREE_OPERAND (index
, 0);
6177 tree hi_index
= TREE_OPERAND (index
, 1);
6178 rtx index_r
, pos_rtx
;
6179 HOST_WIDE_INT lo
, hi
, count
;
6182 /* If the range is constant and "small", unroll the loop. */
6184 && tree_fits_shwi_p (lo_index
)
6185 && tree_fits_shwi_p (hi_index
)
6186 && (lo
= tree_to_shwi (lo_index
),
6187 hi
= tree_to_shwi (hi_index
),
6188 count
= hi
- lo
+ 1,
6191 || (tree_fits_uhwi_p (TYPE_SIZE (elttype
))
6192 && (tree_to_uhwi (TYPE_SIZE (elttype
)) * count
6195 lo
-= minelt
; hi
-= minelt
;
6196 for (; lo
<= hi
; lo
++)
6198 bitpos
= lo
* tree_to_shwi (TYPE_SIZE (elttype
));
6201 && !MEM_KEEP_ALIAS_SET_P (target
)
6202 && TREE_CODE (type
) == ARRAY_TYPE
6203 && TYPE_NONALIASED_COMPONENT (type
))
6205 target
= copy_rtx (target
);
6206 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6209 store_constructor_field
6210 (target
, bitsize
, bitpos
, mode
, value
, cleared
,
6211 get_alias_set (elttype
));
6216 rtx_code_label
*loop_start
= gen_label_rtx ();
6217 rtx_code_label
*loop_end
= gen_label_rtx ();
6220 expand_normal (hi_index
);
6222 index
= build_decl (EXPR_LOCATION (exp
),
6223 VAR_DECL
, NULL_TREE
, domain
);
6224 index_r
= gen_reg_rtx (promote_decl_mode (index
, NULL
));
6225 SET_DECL_RTL (index
, index_r
);
6226 store_expr (lo_index
, index_r
, 0, false);
6228 /* Build the head of the loop. */
6229 do_pending_stack_adjust ();
6230 emit_label (loop_start
);
6232 /* Assign value to element index. */
6234 fold_convert (ssizetype
,
6235 fold_build2 (MINUS_EXPR
,
6238 TYPE_MIN_VALUE (domain
)));
6241 size_binop (MULT_EXPR
, position
,
6242 fold_convert (ssizetype
,
6243 TYPE_SIZE_UNIT (elttype
)));
6245 pos_rtx
= expand_normal (position
);
6246 xtarget
= offset_address (target
, pos_rtx
,
6247 highest_pow2_factor (position
));
6248 xtarget
= adjust_address (xtarget
, mode
, 0);
6249 if (TREE_CODE (value
) == CONSTRUCTOR
)
6250 store_constructor (value
, xtarget
, cleared
,
6251 bitsize
/ BITS_PER_UNIT
);
6253 store_expr (value
, xtarget
, 0, false);
6255 /* Generate a conditional jump to exit the loop. */
6256 exit_cond
= build2 (LT_EXPR
, integer_type_node
,
6258 jumpif (exit_cond
, loop_end
, -1);
6260 /* Update the loop counter, and jump to the head of
6262 expand_assignment (index
,
6263 build2 (PLUS_EXPR
, TREE_TYPE (index
),
6264 index
, integer_one_node
),
6267 emit_jump (loop_start
);
6269 /* Build the end of the loop. */
6270 emit_label (loop_end
);
6273 else if ((index
!= 0 && ! tree_fits_shwi_p (index
))
6274 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype
)))
6279 index
= ssize_int (1);
6282 index
= fold_convert (ssizetype
,
6283 fold_build2 (MINUS_EXPR
,
6286 TYPE_MIN_VALUE (domain
)));
6289 size_binop (MULT_EXPR
, index
,
6290 fold_convert (ssizetype
,
6291 TYPE_SIZE_UNIT (elttype
)));
6292 xtarget
= offset_address (target
,
6293 expand_normal (position
),
6294 highest_pow2_factor (position
));
6295 xtarget
= adjust_address (xtarget
, mode
, 0);
6296 store_expr (value
, xtarget
, 0, false);
6301 bitpos
= ((tree_to_shwi (index
) - minelt
)
6302 * tree_to_uhwi (TYPE_SIZE (elttype
)));
6304 bitpos
= (i
* tree_to_uhwi (TYPE_SIZE (elttype
)));
6306 if (MEM_P (target
) && !MEM_KEEP_ALIAS_SET_P (target
)
6307 && TREE_CODE (type
) == ARRAY_TYPE
6308 && TYPE_NONALIASED_COMPONENT (type
))
6310 target
= copy_rtx (target
);
6311 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6313 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
6314 cleared
, get_alias_set (elttype
));
6322 unsigned HOST_WIDE_INT idx
;
6323 constructor_elt
*ce
;
6326 int icode
= CODE_FOR_nothing
;
6327 tree elttype
= TREE_TYPE (type
);
6328 int elt_size
= tree_to_uhwi (TYPE_SIZE (elttype
));
6329 enum machine_mode eltmode
= TYPE_MODE (elttype
);
6330 HOST_WIDE_INT bitsize
;
6331 HOST_WIDE_INT bitpos
;
6332 rtvec vector
= NULL
;
6334 alias_set_type alias
;
6336 gcc_assert (eltmode
!= BLKmode
);
6338 n_elts
= TYPE_VECTOR_SUBPARTS (type
);
6339 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
6341 enum machine_mode mode
= GET_MODE (target
);
6343 icode
= (int) optab_handler (vec_init_optab
, mode
);
6344 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6345 if (icode
!= CODE_FOR_nothing
)
6349 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
6350 if (TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
)
6352 icode
= CODE_FOR_nothing
;
6356 if (icode
!= CODE_FOR_nothing
)
6360 vector
= rtvec_alloc (n_elts
);
6361 for (i
= 0; i
< n_elts
; i
++)
6362 RTVEC_ELT (vector
, i
) = CONST0_RTX (GET_MODE_INNER (mode
));
6366 /* If the constructor has fewer elements than the vector,
6367 clear the whole array first. Similarly if this is static
6368 constructor of a non-BLKmode object. */
6371 else if (REG_P (target
) && TREE_STATIC (exp
))
6375 unsigned HOST_WIDE_INT count
= 0, zero_count
= 0;
6378 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
6380 int n_elts_here
= tree_to_uhwi
6381 (int_const_binop (TRUNC_DIV_EXPR
,
6382 TYPE_SIZE (TREE_TYPE (value
)),
6383 TYPE_SIZE (elttype
)));
6385 count
+= n_elts_here
;
6386 if (mostly_zeros_p (value
))
6387 zero_count
+= n_elts_here
;
6390 /* Clear the entire vector first if there are any missing elements,
6391 or if the incidence of zero elements is >= 75%. */
6392 need_to_clear
= (count
< n_elts
|| 4 * zero_count
>= 3 * count
);
6395 if (need_to_clear
&& size
> 0 && !vector
)
6398 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6400 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6404 /* Inform later passes that the old value is dead. */
6405 if (!cleared
&& !vector
&& REG_P (target
))
6406 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6409 alias
= MEM_ALIAS_SET (target
);
6411 alias
= get_alias_set (elttype
);
6413 /* Store each element of the constructor into the corresponding
6414 element of TARGET, determined by counting the elements. */
6415 for (idx = 0, i = 0;
6416 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6417 idx++, i += bitsize / elt_size)
6419 HOST_WIDE_INT eltpos;
6420 tree value = ce->value;
6422 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6423 if (cleared && initializer_zerop (value))
6427 eltpos = tree_to_uhwi (ce->index);
6433 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6435 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6436 RTVEC_ELT (vector, eltpos)
6437 = expand_normal (value);
6441 enum machine_mode value_mode =
6442 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6443 ? TYPE_MODE (TREE_TYPE (value))
6445 bitpos = eltpos * elt_size;
6446 store_constructor_field (target, bitsize, bitpos, value_mode,
6447 value, cleared, alias);
6452 emit_insn (GEN_FCN (icode)
6454 gen_rtx_PARALLEL (GET_MODE (target), vector)));
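/* An illustrative sketch, not part of expr.c: the "mostly zeros" policy used
   by both the array and the vector constructor branches above.  The whole
   object is cleared first when the constructor leaves elements uninitialized
   or when at least 75% of the counted elements are zero (the
   4 * zero_count >= 3 * count test); only the nonzero elements are then
   stored individually.  The function and parameter names below are
   hypothetical.  */
static int
needs_full_clear_example (long count, long zero_count, long expected_elts)
{
  if (count < expected_elts)
    return 1;                          /* some elements are missing */
  return 4 * zero_count >= 3 * count;  /* zeros are >= 75% of what we saw */
}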
6463 /* Store the value of EXP (an expression tree)
6464 into a subfield of TARGET which has mode MODE and occupies
6465 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6466 If MODE is VOIDmode, it means that we are storing into a bit-field.
6468 BITREGION_START is bitpos of the first bitfield in this region.
6469 BITREGION_END is the bitpos of the ending bitfield in this region.
6470 These two fields are 0, if the C++ memory model does not apply,
6471 or we are not interested in keeping track of bitfield regions.
6473 Always return const0_rtx unless we have something particular to return.
6476 ALIAS_SET is the alias set for the destination. This value will
6477 (in general) be different from that for TARGET, since TARGET is a
6478 reference to the containing structure.
6480 If NONTEMPORAL is true, try generating a nontemporal store. */
6483 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6484 unsigned HOST_WIDE_INT bitregion_start,
6485 unsigned HOST_WIDE_INT bitregion_end,
6486 enum machine_mode mode, tree exp,
6487 alias_set_type alias_set, bool nontemporal)
6489 if (TREE_CODE (exp
) == ERROR_MARK
)
6492 /* If we have nothing to store, do nothing unless the expression has
6495 return expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6497 if (GET_CODE (target
) == CONCAT
)
6499 /* We're storing into a struct containing a single __complex. */
6501 gcc_assert (!bitpos
);
6502 return store_expr (exp
, target
, 0, nontemporal
);
6505 /* If the structure is in a register or if the component
6506 is a bit field, we cannot use addressing to access it.
6507 Use bit-field techniques or SUBREG to store in it. */
6509 if (mode
== VOIDmode
6510 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
6511 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6512 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6514 || GET_CODE (target
) == SUBREG
6515 /* If the field isn't aligned enough to store as an ordinary memref,
6516 store it as a bit field. */
6518 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
6519 || bitpos
% GET_MODE_ALIGNMENT (mode
))
6520 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
6521 || (bitpos
% BITS_PER_UNIT
!= 0)))
6522 || (bitsize
>= 0 && mode
!= BLKmode
6523 && GET_MODE_BITSIZE (mode
) > bitsize
)
6524 /* If the RHS and field are a constant size and the size of the
6525 RHS isn't the same size as the bitfield, we must use bitfield
6528 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
6529 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0)
6530 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6531 decl we must use bitfield operations. */
6533 && TREE_CODE (exp
) == MEM_REF
6534 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
6535 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
6536 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp
, 0),0 ))
6537 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) != BLKmode
))
6542 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6543 implies a mask operation. If the precision is the same size as
6544 the field we're storing into, that mask is redundant. This is
6545 particularly common with bit field assignments generated by the
6547 nop_def
= get_def_for_expr (exp
, NOP_EXPR
);
6550 tree type
= TREE_TYPE (exp
);
6551 if (INTEGRAL_TYPE_P (type
)
6552 && TYPE_PRECISION (type
) < GET_MODE_BITSIZE (TYPE_MODE (type
))
6553 && bitsize
== TYPE_PRECISION (type
))
6555 tree op
= gimple_assign_rhs1 (nop_def
);
6556 type
= TREE_TYPE (op
);
6557 if (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) >= bitsize
)
6562 temp
= expand_normal (exp
);
6564 /* If BITSIZE is narrower than the size of the type of EXP
6565 we will be narrowing TEMP. Normally, what's wanted are the
6566 low-order bits. However, if EXP's type is a record and this is
6567 big-endian machine, we want the upper BITSIZE bits. */
6568 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
6569 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
6570 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
6571 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
6572 GET_MODE_BITSIZE (GET_MODE (temp
)) - bitsize
,
6575 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6576 if (mode
!= VOIDmode
&& mode
!= BLKmode
6577 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
6578 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
6580 /* If the modes of TEMP and TARGET are both BLKmode, both
6581 must be in memory and BITPOS must be aligned on a byte
6582 boundary. If so, we simply do a block copy. Likewise
6583 for a BLKmode-like TARGET. */
6584 if (GET_MODE (temp
) == BLKmode
6585 && (GET_MODE (target
) == BLKmode
6587 && GET_MODE_CLASS (GET_MODE (target
)) == MODE_INT
6588 && (bitpos
% BITS_PER_UNIT
) == 0
6589 && (bitsize
% BITS_PER_UNIT
) == 0)))
6591 gcc_assert (MEM_P (target
) && MEM_P (temp
)
6592 && (bitpos
% BITS_PER_UNIT
) == 0);
6594 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
6595 emit_block_move (target
, temp
,
6596 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6603 /* Handle calls that return values in multiple non-contiguous locations.
6604 The Irix 6 ABI has examples of this. */
6605 if (GET_CODE (temp
) == PARALLEL
)
6607 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
6609 if (mode
== BLKmode
|| mode
== VOIDmode
)
6610 mode
= smallest_mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
);
6611 temp_target
= gen_reg_rtx (mode
);
6612 emit_group_store (temp_target
, temp
, TREE_TYPE (exp
), size
);
6615 else if (mode
== BLKmode
)
6617 /* Handle calls that return BLKmode values in registers. */
6618 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
6620 rtx temp_target
= gen_reg_rtx (GET_MODE (temp
));
6621 copy_blkmode_from_reg (temp_target
, temp
, TREE_TYPE (exp
));
6626 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
6628 mode
= smallest_mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
);
6629 temp_target
= gen_reg_rtx (mode
);
6631 = extract_bit_field (temp
, size
* BITS_PER_UNIT
, 0, 1,
6632 temp_target
, mode
, mode
);
6637 /* Store the value in the bitfield. */
6638 store_bit_field (target
, bitsize
, bitpos
,
6639 bitregion_start
, bitregion_end
,
6646 /* Now build a reference to just the desired component. */
6647 rtx to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
6649 if (to_rtx
== target
)
6650 to_rtx
= copy_rtx (to_rtx
);
6652 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
6653 set_mem_alias_set (to_rtx
, alias_set
);
6655 return store_expr (exp
, to_rtx
, 0, nontemporal
);
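/* An illustrative sketch, not part of expr.c: what the store_bit_field call
   in store_field accomplishes at the bit level, written with plain shifts and
   masks on one host word.  This is hypothetical example code; it ignores
   endianness, fields that span words, and volatile accesses, all of which the
   real expander has to handle.  */
static unsigned long
store_bits_example (unsigned long word, unsigned long value,
                    unsigned int bitpos, unsigned int bitsize)
{
  unsigned long field_mask
    = (bitsize >= sizeof (unsigned long) * 8
       ? ~0UL : ((1UL << bitsize) - 1)) << bitpos;

  /* Clear the destination bits, then OR in the low-order BITSIZE bits of
     VALUE shifted to BITPOS.  */
  return (word & ~field_mask) | ((value << bitpos) & field_mask);
}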
6659 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6660 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6661 codes and find the ultimate containing object, which we return.
6663 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6664 bit position, and *PUNSIGNEDP to the signedness of the field.
6665 If the position of the field is variable, we store a tree
6666 giving the variable offset (in units) in *POFFSET.
6667 This offset is in addition to the bit position.
6668 If the position is not variable, we store 0 in *POFFSET.
6670 If any of the extraction expressions is volatile,
6671 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6673 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6674 Otherwise, it is a mode that can be used to access the field.
6676 If the field describes a variable-sized object, *PMODE is set to
6677 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6678 this case, but the address of the object can be found.
6680 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6681 look through nodes that serve as markers of a greater alignment than
6682 the one that can be deduced from the expression. These nodes make it
6683 possible for front-ends to prevent temporaries from being created by
6684 the middle-end on alignment considerations. For that purpose, the
6685 normal operating mode at high-level is to always pass FALSE so that
6686 the ultimate containing object is really returned; moreover, the
6687 associated predicate handled_component_p will always return TRUE
6688 on these nodes, thus indicating that they are essentially handled
6689 by get_inner_reference. TRUE should only be passed when the caller
6690 is scanning the expression in order to build another representation
6691 and specifically knows how to handle these nodes; as such, this is
6692 the normal operating mode in the RTL expanders. */
6695 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6696 HOST_WIDE_INT *pbitpos, tree *poffset,
6697 enum machine_mode *pmode, int *punsignedp,
6698 int *pvolatilep, bool keep_aligning)
6701 enum machine_mode mode = VOIDmode;
6702 bool blkmode_bitfield = false;
6703 tree offset = size_zero_node;
6704 offset_int bit_offset = 0;
6706 /* First get the mode, signedness, and size. We do this from just the
6707 outermost expression. */
6709 if (TREE_CODE (exp
) == COMPONENT_REF
)
6711 tree field
= TREE_OPERAND (exp
, 1);
6712 size_tree
= DECL_SIZE (field
);
6713 if (flag_strict_volatile_bitfields
> 0
6714 && TREE_THIS_VOLATILE (exp
)
6715 && DECL_BIT_FIELD_TYPE (field
)
6716 && DECL_MODE (field
) != BLKmode
)
6717 /* Volatile bitfields should be accessed in the mode of the
6718 field's type, not the mode computed based on the bit
6720 mode
= TYPE_MODE (DECL_BIT_FIELD_TYPE (field
));
6721 else if (!DECL_BIT_FIELD (field
))
6722 mode
= DECL_MODE (field
);
6723 else if (DECL_MODE (field
) == BLKmode
)
6724 blkmode_bitfield
= true;
6726 *punsignedp
= DECL_UNSIGNED (field
);
6728 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
6730 size_tree
= TREE_OPERAND (exp
, 1);
6731 *punsignedp
= (! INTEGRAL_TYPE_P (TREE_TYPE (exp
))
6732 || TYPE_UNSIGNED (TREE_TYPE (exp
)));
6734 /* For vector types, with the correct size of access, use the mode of
6736 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == VECTOR_TYPE
6737 && TREE_TYPE (exp
) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
6738 && tree_int_cst_equal (size_tree
, TYPE_SIZE (TREE_TYPE (exp
))))
6739 mode
= TYPE_MODE (TREE_TYPE (exp
));
6743 mode
= TYPE_MODE (TREE_TYPE (exp
));
6744 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
6746 if (mode
== BLKmode
)
6747 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
6749 *pbitsize
= GET_MODE_BITSIZE (mode
);
6754 if (! tree_fits_uhwi_p (size_tree
))
6755 mode
= BLKmode
, *pbitsize
= -1;
6757 *pbitsize
= tree_to_uhwi (size_tree
);
6760 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6761 and find the ultimate containing object. */
6764 switch (TREE_CODE (exp
))
6767 bit_offset
+= wi::to_offset (TREE_OPERAND (exp
, 2));
6772 tree field
= TREE_OPERAND (exp
, 1);
6773 tree this_offset
= component_ref_field_offset (exp
);
6775 /* If this field hasn't been filled in yet, don't go past it.
6776 This should only happen when folding expressions made during
6777 type construction. */
6778 if (this_offset
== 0)
6781 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
6782 bit_offset
+= wi::to_offset (DECL_FIELD_BIT_OFFSET (field
));
6784 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6789 case ARRAY_RANGE_REF
:
6791 tree index
= TREE_OPERAND (exp
, 1);
6792 tree low_bound
= array_ref_low_bound (exp
);
6793 tree unit_size
= array_ref_element_size (exp
);
6795 /* We assume all arrays have sizes that are a multiple of a byte.
6796 First subtract the lower bound, if any, in the type of the
6797 index, then convert to sizetype and multiply by the size of
6798 the array element. */
6799 if (! integer_zerop (low_bound
))
6800 index
= fold_build2 (MINUS_EXPR
, TREE_TYPE (index
),
6803 offset
= size_binop (PLUS_EXPR
, offset
,
6804 size_binop (MULT_EXPR
,
6805 fold_convert (sizetype
, index
),
6814 bit_offset
+= *pbitsize
;
6817 case VIEW_CONVERT_EXPR
:
6818 if (keep_aligning
&& STRICT_ALIGNMENT
6819 && (TYPE_ALIGN (TREE_TYPE (exp
))
6820 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6821 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
6822 < BIGGEST_ALIGNMENT
)
6823 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
6824 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6829 /* Hand back the decl for MEM[&decl, off]. */
6830 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
6832 tree off
= TREE_OPERAND (exp
, 1);
6833 if (!integer_zerop (off
))
6835 offset_int boff
, coff
= mem_ref_offset (exp
);
6836 boff
= wi::lshift (coff
, LOG2_BITS_PER_UNIT
);
6839 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6847 /* If any reference in the chain is volatile, the effect is volatile. */
6848 if (TREE_THIS_VOLATILE (exp
))
6851 exp
= TREE_OPERAND (exp
, 0);
6855 /* If OFFSET is constant, see if we can return the whole thing as a
6856 constant bit position. Make sure to handle overflow during this conversion. */
6858 if (TREE_CODE (offset) == INTEGER_CST)
6860 offset_int tem = wi::sext (wi::to_offset (offset),
6861 TYPE_PRECISION (sizetype));
6862 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6864 if (wi::fits_shwi_p (tem))
6866 *pbitpos = tem.to_shwi ();
6867 *poffset = offset = NULL_TREE;
6871 /* Otherwise, split it up. */
6874 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6875 if (wi::neg_p (bit_offset))
6877 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6878 offset_int tem = bit_offset.and_not (mask);
6879 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6880 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6882 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6883 offset = size_binop (PLUS_EXPR, offset,
6884 wide_int_to_tree (sizetype, tem));
6887 *pbitpos = bit_offset.to_shwi ();
6891 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6892 if (mode == VOIDmode
6894 && (*pbitpos % BITS_PER_UNIT) == 0
6895 && (*pbitsize % BITS_PER_UNIT) == 0)
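/* An illustrative sketch, not part of expr.c: the final split that
   get_inner_reference performs.  A cumulative bit displacement is divided
   into a byte offset plus a residual bit position, rounding the byte offset
   toward minus infinity so the residual is never negative (mirroring the
   wi::mask/wi::arshift code above).  Plain host integers stand in for
   offset_int; the names are hypothetical and 8-bit units are assumed.  */
static void
split_bit_offset_example (long total_bits, long *byte_offset, long *bitpos)
{
  long bytes = total_bits / 8;
  if (total_bits % 8 != 0 && total_bits < 0)
    bytes -= 1;                        /* round toward -inf */
  *byte_offset = bytes;
  *bitpos = total_bits - bytes * 8;    /* residual, always in [0, 8) */
}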
6903 /* Return a tree of sizetype representing the size, in bytes, of the element
6904 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6907 array_ref_element_size (tree exp)
6909 tree aligned_size = TREE_OPERAND (exp, 3);
6910 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6911 location_t loc = EXPR_LOCATION (exp);
6913 /* If a size was specified in the ARRAY_REF, it's the size measured
6914 in alignment units of the element type. So multiply by that value. */
6917 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6918 sizetype from another type of the same width and signedness. */
6919 if (TREE_TYPE (aligned_size) != sizetype)
6920 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6921 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6922 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6925 /* Otherwise, take the size from that of the element type. Substitute
6926 any PLACEHOLDER_EXPR that we have. */
6928 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6931 /* Return a tree representing the lower bound of the array mentioned in
6932 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6935 array_ref_low_bound (tree exp)
6937 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6939 /* If a lower bound is specified in EXP, use it. */
6940 if (TREE_OPERAND (exp, 2))
6941 return TREE_OPERAND (exp, 2);
6943 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6944 substituting for a PLACEHOLDER_EXPR as needed. */
6945 if (domain_type && TYPE_MIN_VALUE (domain_type))
6946 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6948 /* Otherwise, return a zero of the appropriate type. */
6949 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
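/* An illustrative sketch, not part of expr.c: how the pieces returned by
   array_ref_low_bound and array_ref_element_size combine into a byte offset,
   as the ARRAY_REF case of get_inner_reference does with tree arithmetic:
   subtract the lower bound from the index, then scale by the element size.
   For example, with bounds 1..10 and 4-byte elements, element 3 sits at
   (3 - 1) * 4 = 8 bytes.  Plain host integers and hypothetical names stand
   in for the sizetype computations.  */
static long
array_ref_byte_offset_example (long index, long low_bound, long elt_size_bytes)
{
  return (index - low_bound) * elt_size_bytes;
}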
6952 /* Returns true if REF is an array reference to an array at the end of
6953 a structure. If this is the case, the array may be allocated larger
6954 than its upper bound implies. */
6957 array_at_struct_end_p (tree ref)
6959 if (TREE_CODE (ref) != ARRAY_REF
6960 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6963 while (handled_component_p (ref))
6965 /* If the reference chain contains a component reference to a
6966 non-union type and there follows another field the reference
6967 is not at the end of a structure. */
6968 if (TREE_CODE (ref) == COMPONENT_REF
6969 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6971 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6972 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6973 nextf = DECL_CHAIN (nextf);
6978 ref = TREE_OPERAND (ref, 0);
6981 /* If the reference is based on a declared entity, the size of the array
6982 is constrained by its given domain. */
6989 /* Return a tree representing the upper bound of the array mentioned in
6990 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6993 array_ref_up_bound (tree exp)
6995 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6997 /* If there is a domain type and it has an upper bound, use it, substituting
6998 for a PLACEHOLDER_EXPR as needed. */
6999 if (domain_type && TYPE_MAX_VALUE (domain_type))
7000 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7002 /* Otherwise fail. */
7006 /* Return a tree representing the offset, in bytes, of the field referenced
7007 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7010 component_ref_field_offset (tree exp)
7012 tree aligned_offset = TREE_OPERAND (exp, 2);
7013 tree field = TREE_OPERAND (exp, 1);
7014 location_t loc = EXPR_LOCATION (exp);
7016 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7017 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7021 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7022 sizetype from another type of the same width and signedness. */
7023 if (TREE_TYPE (aligned_offset) != sizetype)
7024 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7025 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7026 size_int (DECL_OFFSET_ALIGN (field)
7030 /* Otherwise, take the offset from that of the field. Substitute
7031 any PLACEHOLDER_EXPR that we have. */
7033 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
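/* An illustrative sketch, not part of expr.c: the scaling performed above
   when a COMPONENT_REF carries an explicit offset operand.  That operand is
   measured in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT, so the byte offset
   is the product of the two; the sub-byte part of the field's position comes
   from DECL_FIELD_BIT_OFFSET and is accumulated separately by
   get_inner_reference.  Hypothetical names and 8-bit units are assumed;
   e.g. 3 units at a 32-bit offset alignment give 3 * (32 / 8) = 12 bytes.  */
static long
component_byte_offset_example (long aligned_offset, long offset_align_bits)
{
  return aligned_offset * (offset_align_bits / 8);
}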
7036 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7038 static unsigned HOST_WIDE_INT
7039 target_align (const_tree target)
7041 /* We might have a chain of nested references with intermediate misaligning
7042 bitfield components, so we need to recurse to find out. */
7044 unsigned HOST_WIDE_INT this_align, outer_align;
7046 switch (TREE_CODE (target))
7052 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7053 outer_align = target_align (TREE_OPERAND (target, 0));
7054 return MIN (this_align, outer_align);
7057 case ARRAY_RANGE_REF:
7058 this_align = TYPE_ALIGN (TREE_TYPE (target));
7059 outer_align = target_align (TREE_OPERAND (target, 0));
7060 return MIN (this_align, outer_align);
7063 case NON_LVALUE_EXPR:
7064 case VIEW_CONVERT_EXPR:
7065 this_align = TYPE_ALIGN (TREE_TYPE (target));
7066 outer_align = target_align (TREE_OPERAND (target, 0));
7067 return MAX (this_align, outer_align);
7070 return TYPE_ALIGN (TREE_TYPE (target));
7075 /* Given an rtx VALUE that may contain additions and multiplications, return
7076 an equivalent value that just refers to a register, memory, or constant.
7077 This is done by generating instructions to perform the arithmetic and
7078 returning a pseudo-register containing the value.
7080 The returned value may be a REG, SUBREG, MEM or constant. */
7083 force_operand (rtx value
, rtx target
)
7086 /* Use subtarget as the target for operand 0 of a binary operation. */
7087 rtx subtarget
= get_subtarget (target
);
7088 enum rtx_code code
= GET_CODE (value
);
7090 /* Check for subreg applied to an expression produced by loop optimizer. */
7092 && !REG_P (SUBREG_REG (value
))
7093 && !MEM_P (SUBREG_REG (value
)))
7096 = simplify_gen_subreg (GET_MODE (value
),
7097 force_reg (GET_MODE (SUBREG_REG (value
)),
7098 force_operand (SUBREG_REG (value
),
7100 GET_MODE (SUBREG_REG (value
)),
7101 SUBREG_BYTE (value
));
7102 code
= GET_CODE (value
);
7105 /* Check for a PIC address load. */
7106 if ((code
== PLUS
|| code
== MINUS
)
7107 && XEXP (value
, 0) == pic_offset_table_rtx
7108 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
7109 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
7110 || GET_CODE (XEXP (value
, 1)) == CONST
))
7113 subtarget
= gen_reg_rtx (GET_MODE (value
));
7114 emit_move_insn (subtarget
, value
);
7118 if (ARITHMETIC_P (value
))
7120 op2
= XEXP (value
, 1);
7121 if (!CONSTANT_P (op2
) && !(REG_P (op2
) && op2
!= subtarget
))
7123 if (code
== MINUS
&& CONST_INT_P (op2
))
7126 op2
= negate_rtx (GET_MODE (value
), op2
);
7129 /* Check for an addition with OP2 a constant integer and our first
7130 operand a PLUS of a virtual register and something else. In that
7131 case, we want to emit the sum of the virtual register and the
7132 constant first and then add the other value. This allows virtual
7133 register instantiation to simply modify the constant rather than
7134 creating another one around this addition. */
7135 if (code
== PLUS
&& CONST_INT_P (op2
)
7136 && GET_CODE (XEXP (value
, 0)) == PLUS
7137 && REG_P (XEXP (XEXP (value
, 0), 0))
7138 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7139 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
7141 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
7142 XEXP (XEXP (value
, 0), 0), op2
,
7143 subtarget
, 0, OPTAB_LIB_WIDEN
);
7144 return expand_simple_binop (GET_MODE (value
), code
, temp
,
7145 force_operand (XEXP (XEXP (value
,
7147 target
, 0, OPTAB_LIB_WIDEN
);
7150 op1
= force_operand (XEXP (value
, 0), subtarget
);
7151 op2
= force_operand (op2
, NULL_RTX
);
7155 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
7157 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
7158 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7159 target
, 1, OPTAB_LIB_WIDEN
);
7161 return expand_divmod (0,
7162 FLOAT_MODE_P (GET_MODE (value
))
7163 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
7164 GET_MODE (value
), op1
, op2
, target
, 0);
7166 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7169 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
7172 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7175 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7176 target
, 0, OPTAB_LIB_WIDEN
);
7178 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7179 target
, 1, OPTAB_LIB_WIDEN
);
7182 if (UNARY_P (value
))
7185 target
= gen_reg_rtx (GET_MODE (value
));
7186 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
7193 case FLOAT_TRUNCATE
:
7194 convert_move (target
, op1
, code
== ZERO_EXTEND
);
7199 expand_fix (target
, op1
, code
== UNSIGNED_FIX
);
7203 case UNSIGNED_FLOAT
:
7204 expand_float (target
, op1
, code
== UNSIGNED_FLOAT
);
7208 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
7212 #ifdef INSN_SCHEDULING
7213 /* On machines that have insn scheduling, we want all memory reference to be
7214 explicit, so we need to deal with such paradoxical SUBREGs. */
7215 if (paradoxical_subreg_p (value
) && MEM_P (SUBREG_REG (value
)))
7217 = simplify_gen_subreg (GET_MODE (value
),
7218 force_reg (GET_MODE (SUBREG_REG (value
)),
7219 force_operand (SUBREG_REG (value
),
7221 GET_MODE (SUBREG_REG (value
)),
7222 SUBREG_BYTE (value
));
7228 /* Subroutine of expand_expr: return nonzero iff there is no way that
7229 EXP can reference X, which is being modified. TOP_P is nonzero if this
7230 call is going to be used to determine whether we need a temporary
7231 for EXP, as opposed to a recursive call to this function.
7233 It is always safe for this routine to return zero since it merely
7234 searches for optimization opportunities. */
7237 safe_from_p (const_rtx x
, tree exp
, int top_p
)
7243 /* If EXP has varying size, we MUST use a target since we currently
7244 have no way of allocating temporaries of variable size
7245 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7246 So we assume here that something at a higher level has prevented a
7247 clash. This is somewhat bogus, but the best we can do. Only
7248 do this when X is BLKmode and when we are at the top level. */
7249 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
7250 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
7251 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
7252 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
7253 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
7255 && GET_MODE (x
) == BLKmode
)
7256 /* If X is in the outgoing argument area, it is always safe. */
7258 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
7259 || (GET_CODE (XEXP (x
, 0)) == PLUS
7260 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
7263 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7264 find the underlying pseudo. */
7265 if (GET_CODE (x
) == SUBREG
)
7268 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7272 /* Now look at our tree code and possibly recurse. */
7273 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
7275 case tcc_declaration
:
7276 exp_rtl
= DECL_RTL_IF_SET (exp
);
7282 case tcc_exceptional
:
7283 if (TREE_CODE (exp
) == TREE_LIST
)
7287 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
7289 exp
= TREE_CHAIN (exp
);
7292 if (TREE_CODE (exp
) != TREE_LIST
)
7293 return safe_from_p (x
, exp
, 0);
7296 else if (TREE_CODE (exp
) == CONSTRUCTOR
)
7298 constructor_elt
*ce
;
7299 unsigned HOST_WIDE_INT idx
;
7301 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp
), idx
, ce
)
7302 if ((ce
->index
!= NULL_TREE
&& !safe_from_p (x
, ce
->index
, 0))
7303 || !safe_from_p (x
, ce
->value
, 0))
7307 else if (TREE_CODE (exp
) == ERROR_MARK
)
7308 return 1; /* An already-visited SAVE_EXPR? */
7313 /* The only case we look at here is the DECL_INITIAL inside a
7315 return (TREE_CODE (exp
) != DECL_EXPR
7316 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
7317 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
7318 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
7321 case tcc_comparison
:
7322 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
7327 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7329 case tcc_expression
:
7332 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7333 the expression. If it is set, we conflict iff we are that rtx or
7334 both are in memory. Otherwise, we check all operands of the
7335 expression recursively. */
7337 switch (TREE_CODE (exp
))
7340 /* If the operand is static or we are static, we can't conflict.
7341 Likewise if we don't conflict with the operand at all. */
7342 if (staticp (TREE_OPERAND (exp
, 0))
7343 || TREE_STATIC (exp
)
7344 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
7347 /* Otherwise, the only way this can conflict is if we are taking
7348 the address of a DECL whose address is part of X, which is very rare. */
7350 exp
= TREE_OPERAND (exp
, 0);
7353 if (!DECL_RTL_SET_P (exp
)
7354 || !MEM_P (DECL_RTL (exp
)))
7357 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
7363 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
7364 get_alias_set (exp
)))
7369 /* Assume that the call will clobber all hard registers and
7371 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7376 case WITH_CLEANUP_EXPR
:
7377 case CLEANUP_POINT_EXPR
:
7378 /* Lowered by gimplify.c. */
7382 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7388 /* If we have an rtx, we do not need to scan our operands. */
7392 nops
= TREE_OPERAND_LENGTH (exp
);
7393 for (i
= 0; i
< nops
; i
++)
7394 if (TREE_OPERAND (exp
, i
) != 0
7395 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
7401 /* Should never get a type here. */
7405 /* If we have an rtl, find any enclosed object. Then see if we conflict
7409 if (GET_CODE (exp_rtl
) == SUBREG
)
7411 exp_rtl
= SUBREG_REG (exp_rtl
);
7413 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
7417 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7418 are memory and they conflict. */
7419 return ! (rtx_equal_p (x
, exp_rtl
)
7420 || (MEM_P (x
) && MEM_P (exp_rtl
)
7421 && true_dependence (exp_rtl
, VOIDmode
, x
)));
7424 /* If we reach here, it is safe. */
7429 /* Return the highest power of two that EXP is known to be a multiple of.
7430 This is used in updating alignment of MEMs in array references. */
7432 unsigned HOST_WIDE_INT
7433 highest_pow2_factor (const_tree exp)
7435 unsigned HOST_WIDE_INT ret;
7436 int trailing_zeros = tree_ctz (exp);
7437 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7438 return BIGGEST_ALIGNMENT;
7439 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7440 if (ret > BIGGEST_ALIGNMENT)
7441 return BIGGEST_ALIGNMENT;
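/* An illustrative sketch, not part of expr.c: the computation
   highest_pow2_factor performs.  tree_ctz reports how many trailing zero
   bits EXP is known to have; the largest power of two EXP is a known
   multiple of is then 1 << trailing_zeros, capped at BIGGEST_ALIGNMENT.
   The standalone version below uses a plain unsigned value and GCC's
   __builtin_ctzl in place of tree_ctz, with a caller-supplied cap standing
   in for BIGGEST_ALIGNMENT.  */
static unsigned long
highest_pow2_factor_example (unsigned long value, unsigned long cap)
{
  unsigned long ret;
  if (value == 0)
    return cap;                 /* zero is a multiple of every power of two */
  ret = 1UL << __builtin_ctzl (value);
  return ret > cap ? cap : ret;
}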
7445 /* Similar, except that the alignment requirements of TARGET are
7446 taken into account. Assume it is at least as aligned as its
7447 type, unless it is a COMPONENT_REF in which case the layout of
7448 the structure gives the alignment. */
7450 static unsigned HOST_WIDE_INT
7451 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7453 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7454 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7456 return MAX (factor, talign);
7459 #ifdef HAVE_conditional_move
7460 /* Convert the tree comparison code TCODE to the rtl one where the
7461 signedness is UNSIGNEDP. */
7463 static enum rtx_code
7464 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7476 code = unsignedp ? LTU : LT;
7479 code = unsignedp ? LEU : LE;
7482 code = unsignedp ? GTU : GT;
7485 code = unsignedp ? GEU : GE;
7487 case UNORDERED_EXPR:
7519 /* Subroutine of expand_expr. Expand the two operands of a binary
7520 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7521 The value may be stored in TARGET if TARGET is nonzero. The
7522 MODIFIER argument is as documented by expand_expr. */
7525 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7526 enum expand_modifier modifier)
7528 if (! safe_from_p (target, exp1, 1))
7530 if (operand_equal_p (exp0, exp1, 0))
7532 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7533 *op1 = copy_rtx (*op0);
7537 /* If we need to preserve evaluation order, copy exp0 into its own
7538 temporary variable so that it can't be clobbered by exp1. */
7539 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7540 exp0 = save_expr (exp0);
7541 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7542 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7547 /* Return a MEM that contains constant EXP. DEFER is as for
7548 output_constant_def and MODIFIER is as for expand_expr. */
7551 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7555 mem = output_constant_def (exp, defer);
7556 if (modifier != EXPAND_INITIALIZER)
7557 mem = use_anchored_address (mem);
7561 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7562 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7565 expand_expr_addr_expr_1 (tree exp
, rtx target
, enum machine_mode tmode
,
7566 enum expand_modifier modifier
, addr_space_t as
)
7568 rtx result
, subtarget
;
7570 HOST_WIDE_INT bitsize
, bitpos
;
7571 int volatilep
, unsignedp
;
7572 enum machine_mode mode1
;
7574 /* If we are taking the address of a constant and are at the top level,
7575 we have to use output_constant_def since we can't call force_const_mem
7577 /* ??? This should be considered a front-end bug. We should not be
7578 generating ADDR_EXPR of something that isn't an LVALUE. The only
7579 exception here is STRING_CST. */
7580 if (CONSTANT_CLASS_P (exp
))
7582 result
= XEXP (expand_expr_constant (exp
, 0, modifier
), 0);
7583 if (modifier
< EXPAND_SUM
)
7584 result
= force_operand (result
, target
);
7588 /* Everything must be something allowed by is_gimple_addressable. */
7589 switch (TREE_CODE (exp
))
7592 /* This case will happen via recursion for &a->b. */
7593 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7597 tree tem
= TREE_OPERAND (exp
, 0);
7598 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
7599 tem
= fold_build_pointer_plus (tem
, TREE_OPERAND (exp
, 1));
7600 return expand_expr (tem
, target
, tmode
, modifier
);
7604 /* Expand the initializer like constants above. */
7605 result
= XEXP (expand_expr_constant (DECL_INITIAL (exp
),
7607 if (modifier
< EXPAND_SUM
)
7608 result
= force_operand (result
, target
);
7612 /* The real part of the complex number is always first, therefore
7613 the address is the same as the address of the parent object. */
7616 inner
= TREE_OPERAND (exp
, 0);
7620 /* The imaginary part of the complex number is always second.
7621 The expression is therefore always offset by the size of the
7624 bitpos
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp
)));
7625 inner
= TREE_OPERAND (exp
, 0);
7628 case COMPOUND_LITERAL_EXPR
:
7629 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7630 rtl_for_decl_init is called on DECL_INITIAL with
7631 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7632 if (modifier
== EXPAND_INITIALIZER
7633 && COMPOUND_LITERAL_EXPR_DECL (exp
))
7634 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp
),
7635 target
, tmode
, modifier
, as
);
7638 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7639 expand_expr, as that can have various side effects; LABEL_DECLs for
7640 example, may not have their DECL_RTL set yet. Expand the rtl of
7641 CONSTRUCTORs too, which should yield a memory reference for the
7642 constructor's contents. Assume language specific tree nodes can
7643 be expanded in some interesting way. */
7644 gcc_assert (TREE_CODE (exp
) < LAST_AND_UNUSED_TREE_CODE
);
7646 || TREE_CODE (exp
) == CONSTRUCTOR
7647 || TREE_CODE (exp
) == COMPOUND_LITERAL_EXPR
)
7649 result
= expand_expr (exp
, target
, tmode
,
7650 modifier
== EXPAND_INITIALIZER
7651 ? EXPAND_INITIALIZER
: EXPAND_CONST_ADDRESS
);
7653 /* If the DECL isn't in memory, then the DECL wasn't properly
7654 marked TREE_ADDRESSABLE, which will be either a front-end
7655 or a tree optimizer bug. */
7657 if (TREE_ADDRESSABLE (exp
)
7659 && ! targetm
.calls
.allocate_stack_slots_for_args ())
7661 error ("local frame unavailable (naked function?)");
7665 gcc_assert (MEM_P (result
));
7666 result
= XEXP (result
, 0);
7668 /* ??? Is this needed anymore? */
7670 TREE_USED (exp
) = 1;
7672 if (modifier
!= EXPAND_INITIALIZER
7673 && modifier
!= EXPAND_CONST_ADDRESS
7674 && modifier
!= EXPAND_SUM
)
7675 result
= force_operand (result
, target
);
7679 /* Pass FALSE as the last argument to get_inner_reference although
7680 we are expanding to RTL. The rationale is that we know how to
7681 handle "aligning nodes" here: we can just bypass them because
7682 they won't change the final object whose address will be returned
7683 (they actually exist only for that purpose). */
7684 inner
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7685 &mode1
, &unsignedp
, &volatilep
, false);
7689 /* We must have made progress. */
7690 gcc_assert (inner
!= exp
);
7692 subtarget
= offset
|| bitpos
? NULL_RTX
: target
;
7693 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7694 inner alignment, force the inner to be sufficiently aligned. */
7695 if (CONSTANT_CLASS_P (inner
)
7696 && TYPE_ALIGN (TREE_TYPE (inner
)) < TYPE_ALIGN (TREE_TYPE (exp
)))
7698 inner
= copy_node (inner
);
7699 TREE_TYPE (inner
) = copy_node (TREE_TYPE (inner
));
7700 TYPE_ALIGN (TREE_TYPE (inner
)) = TYPE_ALIGN (TREE_TYPE (exp
));
7701 TYPE_USER_ALIGN (TREE_TYPE (inner
)) = 1;
7703 result
= expand_expr_addr_expr_1 (inner
, subtarget
, tmode
, modifier
, as
);
7709 if (modifier
!= EXPAND_NORMAL
)
7710 result
= force_operand (result
, NULL
);
7711 tmp
= expand_expr (offset
, NULL_RTX
, tmode
,
7712 modifier
== EXPAND_INITIALIZER
7713 ? EXPAND_INITIALIZER
: EXPAND_NORMAL
);
7715 /* expand_expr is allowed to return an object in a mode other
7716 than TMODE. If it did, we need to convert. */
7717 if (GET_MODE (tmp
) != VOIDmode
&& tmode
!= GET_MODE (tmp
))
7718 tmp
= convert_modes (tmode
, GET_MODE (tmp
),
7719 tmp
, TYPE_UNSIGNED (TREE_TYPE (offset
)));
7720 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7721 tmp
= convert_memory_address_addr_space (tmode
, tmp
, as
);
7723 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7724 result
= simplify_gen_binary (PLUS
, tmode
, result
, tmp
);
7727 subtarget
= bitpos
? NULL_RTX
: target
;
7728 result
= expand_simple_binop (tmode
, PLUS
, result
, tmp
, subtarget
,
7729 1, OPTAB_LIB_WIDEN
);
7735 /* Someone beforehand should have rejected taking the address
7736 of such an object. */
7737 gcc_assert ((bitpos
% BITS_PER_UNIT
) == 0);
7739 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7740 result
= plus_constant (tmode
, result
, bitpos
/ BITS_PER_UNIT
);
7741 if (modifier
< EXPAND_SUM
)
7742 result
= force_operand (result
, target
);
7748 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7749 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7752 expand_expr_addr_expr (tree exp
, rtx target
, enum machine_mode tmode
,
7753 enum expand_modifier modifier
)
7755 addr_space_t as
= ADDR_SPACE_GENERIC
;
7756 enum machine_mode address_mode
= Pmode
;
7757 enum machine_mode pointer_mode
= ptr_mode
;
7758 enum machine_mode rmode
;
7761 /* Target mode of VOIDmode says "whatever's natural". */
7762 if (tmode
== VOIDmode
)
7763 tmode
= TYPE_MODE (TREE_TYPE (exp
));
7765 if (POINTER_TYPE_P (TREE_TYPE (exp
)))
7767 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
7768 address_mode
= targetm
.addr_space
.address_mode (as
);
7769 pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7772 /* We can get called with some Weird Things if the user does silliness
7773 like "(short) &a". In that case, convert_memory_address won't do
7774 the right thing, so ignore the given target mode. */
7775 if (tmode
!= address_mode
&& tmode
!= pointer_mode
)
7776 tmode
= address_mode
;
7778 result
= expand_expr_addr_expr_1 (TREE_OPERAND (exp
, 0), target
,
7779 tmode
, modifier
, as
);
7781 /* Despite expand_expr claims concerning ignoring TMODE when not
7782 strictly convenient, stuff breaks if we don't honor it. Note
7783 that combined with the above, we only do this for pointer modes. */
7784 rmode
= GET_MODE (result
);
7785 if (rmode
== VOIDmode
)
7788 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7793 /* Generate code for computing CONSTRUCTOR EXP.
7794 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7795 is TRUE, instead of creating a temporary variable in memory
7796 NULL is returned and the caller needs to handle it differently. */
7799 expand_constructor (tree exp
, rtx target
, enum expand_modifier modifier
,
7800 bool avoid_temp_mem
)
7802 tree type
= TREE_TYPE (exp
);
7803 enum machine_mode mode
= TYPE_MODE (type
);
7805 /* Try to avoid creating a temporary at all. This is possible
7806 if all of the initializer is zero.
7807 FIXME: try to handle all [0..255] initializers we can handle
7809 if (TREE_STATIC (exp
)
7810 && !TREE_ADDRESSABLE (exp
)
7811 && target
!= 0 && mode
== BLKmode
7812 && all_zeros_p (exp
))
7814 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
7818 /* All elts simple constants => refer to a constant in memory. But
7819 if this is a non-BLKmode mode, let it store a field at a time
7820 since that should make a CONST_INT, CONST_WIDE_INT or
7821 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7822 use, it is best to store directly into the target unless the type
7823 is large enough that memcpy will be used. If we are making an
7824 initializer and all operands are constant, put it in memory as
7827 FIXME: Avoid trying to fill vector constructors piece-meal.
7828 Output them with output_constant_def below unless we're sure
7829 they're zeros. This should go away when vector initializers
7830 are treated like VECTOR_CST instead of arrays. */
7831 if ((TREE_STATIC (exp
)
7832 && ((mode
== BLKmode
7833 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
7834 || TREE_ADDRESSABLE (exp
)
7835 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type
))
7836 && (! MOVE_BY_PIECES_P
7837 (tree_to_uhwi (TYPE_SIZE_UNIT (type
)),
7839 && ! mostly_zeros_p (exp
))))
7840 || ((modifier
== EXPAND_INITIALIZER
|| modifier
== EXPAND_CONST_ADDRESS
)
7841 && TREE_CONSTANT (exp
)))
7848 constructor
= expand_expr_constant (exp
, 1, modifier
);
7850 if (modifier
!= EXPAND_CONST_ADDRESS
7851 && modifier
!= EXPAND_INITIALIZER
7852 && modifier
!= EXPAND_SUM
)
7853 constructor
= validize_mem (constructor
);
7858 /* Handle calls that pass values in multiple non-contiguous
7859 locations. The Irix 6 ABI has examples of this. */
7860 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
7861 || GET_CODE (target
) == PARALLEL
|| modifier
== EXPAND_STACK_PARM
)
7866 target
= assign_temp (type
, TREE_ADDRESSABLE (exp
), 1);
7869 store_constructor (exp
, target
, 0, int_expr_size (exp
));
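/* An illustrative sketch, not part of expr.c: the effect of the all_zeros_p
   fast path at the top of expand_constructor.  A static, non-addressable
   BLKmode constructor whose elements are all zero is expanded as one block
   clear of the target (clear_storage) instead of a sequence of per-element
   stores.  At the source level that is the kind of initializer shown below;
   the type and function names are hypothetical.  */
struct zeroed_example_s { int a; long b[8]; char c[16]; };

static void
zeroed_example (struct zeroed_example_s *p)
{
  struct zeroed_example_s z = { 0 };  /* reaches the expander as an
                                         all-zero CONSTRUCTOR */
  *p = z;
}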
7874 /* expand_expr: generate code for computing expression EXP.
7875 An rtx for the computed value is returned. The value is never null.
7876 In the case of a void EXP, const0_rtx is returned.
7878 The value may be stored in TARGET if TARGET is nonzero.
7879 TARGET is just a suggestion; callers must assume that
7880 the rtx returned may not be the same as TARGET.
7882 If TARGET is CONST0_RTX, it means that the value will be ignored.
7884 If TMODE is not VOIDmode, it suggests generating the
7885 result in mode TMODE. But this is done only when convenient.
7886 Otherwise, TMODE is ignored and the value generated in its natural mode.
7887 TMODE is just a suggestion; callers must assume that
7888 the rtx returned may not have mode TMODE.
7890 Note that TARGET may have neither TMODE nor MODE. In that case, it
7891 probably will not be used.
7893 If MODIFIER is EXPAND_SUM then when EXP is an addition
7894 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7895 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7896 products as above, or REG or MEM, or constant.
7897 Ordinarily in such cases we would output mul or add instructions
7898 and then return a pseudo reg containing the sum.
7900 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7901 it also marks a label as absolutely required (it can't be dead).
7902 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7903 This is used for outputting expressions used in initializers.
7905 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7906 with a constant address even if that address is not normally legitimate.
7907 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7909 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7910 a call parameter. Such targets require special care as we haven't yet
7911 marked TARGET so that it's safe from being trashed by libcalls. We
7912 don't want to use TARGET for anything but the final result;
7913 Intermediate values must go elsewhere. Additionally, calls to
7914 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7916 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7917 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7918 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7919 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7922 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7923 In this case, we don't adjust a returned MEM rtx that wouldn't be
7924 sufficiently aligned for its mode; instead, it's up to the caller
7925 to deal with it afterwards. This is used to make sure that unaligned
7926 base objects for which out-of-bounds accesses are supported, for
7927 example record types with trailing arrays, aren't realigned behind
7928 the back of the caller.
7929 The normal operating mode is to pass FALSE for this parameter. */
7932 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7933 enum expand_modifier modifier, rtx *alt_rtl,
7934 bool inner_reference_p)
7938 /* Handle ERROR_MARK before anybody tries to access its type. */
7939 if (TREE_CODE (exp) == ERROR_MARK
7940 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7942 ret = CONST0_RTX (tmode);
7943 return ret ? ret : const0_rtx;
7946 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7951 /* Try to expand the conditional expression which is represented by
7952 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7953 return the rtl reg which represents the result. Otherwise return NULL_RTX. */
7957 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7958 tree treeop1 ATTRIBUTE_UNUSED,
7959 tree treeop2 ATTRIBUTE_UNUSED)
7961 #ifdef HAVE_conditional_move
7963 rtx op00
, op01
, op1
, op2
;
7964 enum rtx_code comparison_code
;
7965 enum machine_mode comparison_mode
;
7968 tree type
= TREE_TYPE (treeop1
);
7969 int unsignedp
= TYPE_UNSIGNED (type
);
7970 enum machine_mode mode
= TYPE_MODE (type
);
7971 enum machine_mode orig_mode
= mode
;
7973 /* If we cannot do a conditional move on the mode, try doing it
7974 with the promoted mode. */
7975 if (!can_conditionally_move_p (mode
))
7977 mode
= promote_mode (type
, mode
, &unsignedp
);
7978 if (!can_conditionally_move_p (mode
))
7980 temp
= assign_temp (type
, 0, 0); /* Use promoted mode for temp. */
7983 temp
= assign_temp (type
, 0, 1);
7986 expand_operands (treeop1
, treeop2
,
7987 temp
, &op1
, &op2
, EXPAND_NORMAL
);
7989 if (TREE_CODE (treeop0
) == SSA_NAME
7990 && (srcstmt
= get_def_for_expr_class (treeop0
, tcc_comparison
)))
7992 tree type
= TREE_TYPE (gimple_assign_rhs1 (srcstmt
));
7993 enum tree_code cmpcode
= gimple_assign_rhs_code (srcstmt
);
7994 op00
= expand_normal (gimple_assign_rhs1 (srcstmt
));
7995 op01
= expand_normal (gimple_assign_rhs2 (srcstmt
));
7996 comparison_mode
= TYPE_MODE (type
);
7997 unsignedp
= TYPE_UNSIGNED (type
);
7998 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
8000 else if (TREE_CODE_CLASS (TREE_CODE (treeop0
)) == tcc_comparison
)
8002 tree type
= TREE_TYPE (TREE_OPERAND (treeop0
, 0));
8003 enum tree_code cmpcode
= TREE_CODE (treeop0
);
8004 op00
= expand_normal (TREE_OPERAND (treeop0
, 0));
8005 op01
= expand_normal (TREE_OPERAND (treeop0
, 1));
8006 unsignedp
= TYPE_UNSIGNED (type
);
8007 comparison_mode
= TYPE_MODE (type
);
8008 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
8012 op00
= expand_normal (treeop0
);
8014 comparison_code
= NE
;
8015 comparison_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
8018 if (GET_MODE (op1
) != mode
)
8019 op1
= gen_lowpart (mode
, op1
);
8021 if (GET_MODE (op2
) != mode
)
8022 op2
= gen_lowpart (mode
, op2
);
8024 /* Try to emit the conditional move. */
8025 insn
= emit_conditional_move (temp
, comparison_code
,
8026 op00
, op01
, comparison_mode
,
8030 /* If we could do the conditional move, emit the sequence,
8034 rtx_insn
*seq
= get_insns ();
8037 return convert_modes (orig_mode
, mode
, temp
, 0);
8040 /* Otherwise discard the sequence and fall back to code with
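/* An illustrative sketch, not part of expr.c: what expand_cond_expr_using_cmove
   aims for.  When the target has a conditional move, COND ? A : B can be
   evaluated without a branch: both arms go into registers and the comparison
   selects between them, as emit_conditional_move does above.  The branch-free
   C below mimics that selection with a mask; the function name is hypothetical
   and no machine cmov instruction is implied.  */
static long
branchless_select_example (int cond, long a, long b)
{
  long mask = -(long) (cond != 0);   /* all ones when COND holds, else zero */
  return (a & mask) | (b & ~mask);
}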
rtx
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
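/* Editor's illustrative note (not part of the original source): the
   REDUCE_BIT_FIELD handling above matters for bit-field-like types such
   as a 3-bit unsigned field held in an SImode register.  Adding two such
   values as full SImode words can set bits above bit 2, so the result is
   masked back to the declared precision (conceptually, x & 7) before it
   is used.  */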
8106 case NON_LVALUE_EXPR
:
8109 if (treeop0
== error_mark_node
)
8112 if (TREE_CODE (type
) == UNION_TYPE
)
8114 tree valtype
= TREE_TYPE (treeop0
);
	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attributes.  */
8118 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
8120 rtx result
= expand_expr (treeop0
, target
, tmode
,
8123 result
= copy_rtx (result
);
8124 set_mem_attributes (result
, type
, 0);
8130 if (TYPE_MODE (type
) != BLKmode
)
8131 target
= gen_reg_rtx (TYPE_MODE (type
));
8133 target
= assign_temp (type
, 1, 1);
8137 /* Store data into beginning of memory target. */
8138 store_expr (treeop0
,
8139 adjust_address (target
, TYPE_MODE (valtype
), 0),
8140 modifier
== EXPAND_STACK_PARM
,
8145 gcc_assert (REG_P (target
));
8147 /* Store this field into a union of the proper type. */
8148 store_field (target
,
8149 MIN ((int_size_in_bytes (TREE_TYPE
8152 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
8153 0, 0, 0, TYPE_MODE (valtype
), treeop0
, 0, false);
8156 /* Return the entire union. */
8160 if (mode
== TYPE_MODE (TREE_TYPE (treeop0
)))
8162 op0
= expand_expr (treeop0
, target
, VOIDmode
,
8165 /* If the signedness of the conversion differs and OP0 is
8166 a promoted SUBREG, clear that indication since we now
8167 have to do the proper extension. */
8168 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)) != unsignedp
8169 && GET_CODE (op0
) == SUBREG
)
8170 SUBREG_PROMOTED_VAR_P (op0
) = 0;
8172 return REDUCE_BIT_FIELD (op0
);
8175 op0
= expand_expr (treeop0
, NULL_RTX
, mode
,
8176 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
);
8177 if (GET_MODE (op0
) == mode
)
8180 /* If OP0 is a constant, just convert it into the proper mode. */
8181 else if (CONSTANT_P (op0
))
8183 tree inner_type
= TREE_TYPE (treeop0
);
8184 enum machine_mode inner_mode
= GET_MODE (op0
);
8186 if (inner_mode
== VOIDmode
)
8187 inner_mode
= TYPE_MODE (inner_type
);
8189 if (modifier
== EXPAND_INITIALIZER
)
8190 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
8191 subreg_lowpart_offset (mode
,
8194 op0
= convert_modes (mode
, inner_mode
, op0
,
8195 TYPE_UNSIGNED (inner_type
));
8198 else if (modifier
== EXPAND_INITIALIZER
)
8199 op0
= gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
8201 else if (target
== 0)
8202 op0
= convert_to_mode (mode
, op0
,
8203 TYPE_UNSIGNED (TREE_TYPE
8207 convert_move (target
, op0
,
8208 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8212 return REDUCE_BIT_FIELD (op0
);
8214 case ADDR_SPACE_CONVERT_EXPR
:
8216 tree treeop0_type
= TREE_TYPE (treeop0
);
8218 addr_space_t as_from
;
8220 gcc_assert (POINTER_TYPE_P (type
));
8221 gcc_assert (POINTER_TYPE_P (treeop0_type
));
8223 as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
8224 as_from
= TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type
));
8226 /* Conversions between pointers to the same address space should
8227 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8228 gcc_assert (as_to
!= as_from
);
8230 /* Ask target code to handle conversion between pointers
8231 to overlapping address spaces. */
8232 if (targetm
.addr_space
.subset_p (as_to
, as_from
)
8233 || targetm
.addr_space
.subset_p (as_from
, as_to
))
8235 op0
= expand_expr (treeop0
, NULL_RTX
, VOIDmode
, modifier
);
8236 op0
= targetm
.addr_space
.convert (op0
, treeop0_type
, type
);
8241 /* For disjoint address spaces, converting anything but
8242 a null pointer invokes undefined behaviour. We simply
8243 always return a null pointer here. */
8244 return CONST0_RTX (mode
);
8247 case POINTER_PLUS_EXPR
:
8248 /* Even though the sizetype mode and the pointer's mode can be different
8249 expand is able to handle this correctly and get the correct result out
8250 of the PLUS_EXPR code. */
8251 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8252 if sizetype precision is smaller than pointer precision. */
8253 if (TYPE_PRECISION (sizetype
) < TYPE_PRECISION (type
))
8254 treeop1
= fold_convert_loc (loc
, type
,
8255 fold_convert_loc (loc
, ssizetype
,
8257 /* If sizetype precision is larger than pointer precision, truncate the
8258 offset to have matching modes. */
8259 else if (TYPE_PRECISION (sizetype
) > TYPE_PRECISION (type
))
8260 treeop1
= fold_convert_loc (loc
, type
, treeop1
);
8263 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8264 something else, make sure we add the register to the constant and
8265 then to the other thing. This case can occur during strength
8266 reduction and doing it this way will produce better code if the
8267 frame pointer or argument pointer is eliminated.
8269 fold-const.c will ensure that the constant is always in the inner
8270 PLUS_EXPR, so the only case we need to do anything about is if
8271 sp, ap, or fp is our second argument, in which case we must swap
8272 the innermost first argument and our second argument. */
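      /* Editor's illustrative note (not part of the original source):
	 e.g. (X + 4) + FP is rearranged here so that it is expanded as
	 (FP + 4) + X; plus_constant can then fold the 4 into the
	 register's address, which pays off once the frame pointer is
	 eliminated into a stack-pointer-plus-offset form.  */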
8274 if (TREE_CODE (treeop0
) == PLUS_EXPR
8275 && TREE_CODE (TREE_OPERAND (treeop0
, 1)) == INTEGER_CST
8276 && TREE_CODE (treeop1
) == VAR_DECL
8277 && (DECL_RTL (treeop1
) == frame_pointer_rtx
8278 || DECL_RTL (treeop1
) == stack_pointer_rtx
8279 || DECL_RTL (treeop1
) == arg_pointer_rtx
))
8284 /* If the result is to be ptr_mode and we are adding an integer to
8285 something, we might be forming a constant. So try to use
8286 plus_constant. If it produces a sum and we can't accept it,
8287 use force_operand. This allows P = &ARR[const] to generate
8288 efficient code on machines where a SYMBOL_REF is not a valid
8291 If this is an EXPAND_SUM call, always return the sum. */
8292 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
8293 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
8295 if (modifier
== EXPAND_STACK_PARM
)
8297 if (TREE_CODE (treeop0
) == INTEGER_CST
8298 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8299 && TREE_CONSTANT (treeop1
))
8303 enum machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop1
));
8305 op1
= expand_expr (treeop1
, subtarget
, VOIDmode
,
8307 /* Use wi::shwi to ensure that the constant is
8308 truncated according to the mode of OP1, then sign extended
8309 to a HOST_WIDE_INT. Using the constant directly can result
8310 in non-canonical RTL in a 64x32 cross compile. */
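	      /* Editor's illustrative note (not part of the original
		 source): on a 64-bit host targeting a 32-bit machine, an
		 SImode constant with the sign bit set, say 0xffffffff,
		 must be represented as (const_int -1).  Using the raw
		 host value would give the non-canonical
		 (const_int 0xffffffff); wi::shwi truncates to the mode
		 and then sign-extends to HOST_WIDE_INT, producing the
		 canonical form.  */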
8311 wc
= TREE_INT_CST_LOW (treeop0
);
8313 immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8314 op1
= plus_constant (mode
, op1
, INTVAL (constant_part
));
8315 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8316 op1
= force_operand (op1
, target
);
8317 return REDUCE_BIT_FIELD (op1
);
8320 else if (TREE_CODE (treeop1
) == INTEGER_CST
8321 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8322 && TREE_CONSTANT (treeop0
))
8326 enum machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop0
));
8328 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8329 (modifier
== EXPAND_INITIALIZER
8330 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
8331 if (! CONSTANT_P (op0
))
8333 op1
= expand_expr (treeop1
, NULL_RTX
,
8334 VOIDmode
, modifier
);
8335 /* Return a PLUS if modifier says it's OK. */
8336 if (modifier
== EXPAND_SUM
8337 || modifier
== EXPAND_INITIALIZER
)
8338 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
8341 /* Use wi::shwi to ensure that the constant is
8342 truncated according to the mode of OP1, then sign extended
8343 to a HOST_WIDE_INT. Using the constant directly can result
8344 in non-canonical RTL in a 64x32 cross compile. */
8345 wc
= TREE_INT_CST_LOW (treeop1
);
8347 = immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8348 op0
= plus_constant (mode
, op0
, INTVAL (constant_part
));
8349 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8350 op0
= force_operand (op0
, target
);
8351 return REDUCE_BIT_FIELD (op0
);
8355 /* Use TER to expand pointer addition of a negated value
8356 as pointer subtraction. */
8357 if ((POINTER_TYPE_P (TREE_TYPE (treeop0
))
8358 || (TREE_CODE (TREE_TYPE (treeop0
)) == VECTOR_TYPE
8359 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0
)))))
8360 && TREE_CODE (treeop1
) == SSA_NAME
8361 && TYPE_MODE (TREE_TYPE (treeop0
))
8362 == TYPE_MODE (TREE_TYPE (treeop1
)))
8364 gimple def
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8367 treeop1
= gimple_assign_rhs1 (def
);
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8377 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8378 || mode
!= ptr_mode
)
8380 expand_operands (treeop0
, treeop1
,
8381 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8382 if (op0
== const0_rtx
)
8384 if (op1
== const0_rtx
)
8389 expand_operands (treeop0
, treeop1
,
8390 subtarget
, &op0
, &op1
, modifier
);
8391 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8395 /* For initializers, we are allowed to return a MINUS of two
8396 symbolic constants. Here we handle all cases when both operands
8398 /* Handle difference of two symbolic constants,
8399 for the sake of an initializer. */
8400 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8401 && really_constant_p (treeop0
)
8402 && really_constant_p (treeop1
))
8404 expand_operands (treeop0
, treeop1
,
8405 NULL_RTX
, &op0
, &op1
, modifier
);
8407 /* If the last operand is a CONST_INT, use plus_constant of
8408 the negated constant. Else make the MINUS. */
8409 if (CONST_INT_P (op1
))
8410 return REDUCE_BIT_FIELD (plus_constant (mode
, op0
,
8413 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode
, op0
, op1
));
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8420 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8421 || mode
!= ptr_mode
)
8424 expand_operands (treeop0
, treeop1
,
8425 subtarget
, &op0
, &op1
, modifier
);
8427 /* Convert A - const to A + (-const). */
8428 if (CONST_INT_P (op1
))
8430 op1
= negate_rtx (mode
, op1
);
8431 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8436 case WIDEN_MULT_PLUS_EXPR
:
8437 case WIDEN_MULT_MINUS_EXPR
:
8438 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8439 op2
= expand_normal (treeop2
);
8440 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
8444 case WIDEN_MULT_EXPR
:
8445 /* If first operand is constant, swap them.
8446 Thus the following special case checks need only
8447 check the second operand. */
8448 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8455 /* First, check if we have a multiplication of one signed and one
8456 unsigned operand. */
8457 if (TREE_CODE (treeop1
) != INTEGER_CST
8458 && (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
8459 != TYPE_UNSIGNED (TREE_TYPE (treeop1
))))
8461 enum machine_mode innermode
= TYPE_MODE (TREE_TYPE (treeop0
));
8462 this_optab
= usmul_widen_optab
;
8463 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8464 != CODE_FOR_nothing
)
8466 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8467 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8470 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op1
, &op0
,
8472 /* op0 and op1 might still be constant, despite the above
8473 != INTEGER_CST check. Handle it. */
8474 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8476 op0
= convert_modes (innermode
, mode
, op0
, true);
8477 op1
= convert_modes (innermode
, mode
, op1
, false);
8478 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8479 target
, unsignedp
));
8484 /* Check for a multiplication with matching signedness. */
8485 else if ((TREE_CODE (treeop1
) == INTEGER_CST
8486 && int_fits_type_p (treeop1
, TREE_TYPE (treeop0
)))
8487 || (TYPE_UNSIGNED (TREE_TYPE (treeop1
))
8488 == TYPE_UNSIGNED (TREE_TYPE (treeop0
))))
8490 tree op0type
= TREE_TYPE (treeop0
);
8491 enum machine_mode innermode
= TYPE_MODE (op0type
);
8492 bool zextend_p
= TYPE_UNSIGNED (op0type
);
8493 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
8494 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
8496 if (TREE_CODE (treeop0
) != INTEGER_CST
)
8498 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8499 != CODE_FOR_nothing
)
8501 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8503 /* op0 and op1 might still be constant, despite the above
8504 != INTEGER_CST check. Handle it. */
8505 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8508 op0
= convert_modes (innermode
, mode
, op0
, zextend_p
);
8510 = convert_modes (innermode
, mode
, op1
,
8511 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8512 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8516 temp
= expand_widening_mult (mode
, op0
, op1
, target
,
8517 unsignedp
, this_optab
);
8518 return REDUCE_BIT_FIELD (temp
);
8520 if (find_widening_optab_handler (other_optab
, mode
, innermode
, 0)
8522 && innermode
== word_mode
)
8525 op0
= expand_normal (treeop0
);
8526 if (TREE_CODE (treeop1
) == INTEGER_CST
)
8527 op1
= convert_modes (innermode
, mode
,
8528 expand_normal (treeop1
),
8529 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8531 op1
= expand_normal (treeop1
);
8532 /* op0 and op1 might still be constant, despite the above
8533 != INTEGER_CST check. Handle it. */
8534 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8535 goto widen_mult_const
;
8536 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8537 unsignedp
, OPTAB_LIB_WIDEN
);
8538 hipart
= gen_highpart (innermode
, temp
);
8539 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
8543 emit_move_insn (hipart
, htem
);
8544 return REDUCE_BIT_FIELD (temp
);
8548 treeop0
= fold_build1 (CONVERT_EXPR
, type
, treeop0
);
8549 treeop1
= fold_build1 (CONVERT_EXPR
, type
, treeop1
);
8550 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8551 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8555 optab opt
= fma_optab
;
8558 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8560 if (optab_handler (fma_optab
, mode
) == CODE_FOR_nothing
)
8562 tree fn
= mathfn_built_in (TREE_TYPE (treeop0
), BUILT_IN_FMA
);
8565 gcc_assert (fn
!= NULL_TREE
);
8566 call_expr
= build_call_expr (fn
, 3, treeop0
, treeop1
, treeop2
);
8567 return expand_builtin (call_expr
, target
, subtarget
, mode
, false);
8570 def0
= get_def_for_expr (treeop0
, NEGATE_EXPR
);
8571 def2
= get_def_for_expr (treeop2
, NEGATE_EXPR
);
8576 && optab_handler (fnms_optab
, mode
) != CODE_FOR_nothing
)
8579 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8580 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8583 && optab_handler (fnma_optab
, mode
) != CODE_FOR_nothing
)
8586 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8589 && optab_handler (fms_optab
, mode
) != CODE_FOR_nothing
)
8592 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8596 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
8598 op2
= expand_normal (treeop2
);
8599 op1
= expand_normal (treeop1
);
8601 return expand_ternary_op (TYPE_MODE (type
), opt
,
8602 op0
, op1
, op2
, target
, 0);
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
8609 if (ALL_FIXED_POINT_MODE_P (mode
))
8612 /* If first operand is constant, swap them.
8613 Thus the following special case checks need only
8614 check the second operand. */
8615 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8622 /* Attempt to return something suitable for generating an
8623 indexed address, for machines that support that. */
8625 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8626 && tree_fits_shwi_p (treeop1
))
8628 tree exp1
= treeop1
;
8630 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8634 op0
= force_operand (op0
, NULL_RTX
);
8636 op0
= copy_to_mode_reg (mode
, op0
);
8638 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
8639 gen_int_mode (tree_to_shwi (exp1
),
8640 TYPE_MODE (TREE_TYPE (exp1
)))));
8643 if (modifier
== EXPAND_STACK_PARM
)
8646 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8647 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8649 case TRUNC_DIV_EXPR
:
8650 case FLOOR_DIV_EXPR
:
8652 case ROUND_DIV_EXPR
:
8653 case EXACT_DIV_EXPR
:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
8657 if (ALL_FIXED_POINT_MODE_P (mode
))
8660 if (modifier
== EXPAND_STACK_PARM
)
      /* Possible optimization: compute the dividend with EXPAND_SUM;
	 then, if the divisor is constant, we can optimize the case
	 where some terms of the dividend have coefficients divisible
	 by it.  */
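      /* Editor's illustrative note (not part of the original source):
	 the optimization sketched above would apply to a dividend such
	 as 8*i + 4*j + 3 divided by the constant 4, where the 8*i and
	 4*j terms divide exactly and only the residual constant needs
	 the real division.  It is not implemented here.  */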
8665 expand_operands (treeop0
, treeop1
,
8666 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8667 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
8672 case MULT_HIGHPART_EXPR
:
8673 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8674 temp
= expand_mult_highpart (mode
, op0
, op1
, target
, unsignedp
);
8678 case TRUNC_MOD_EXPR
:
8679 case FLOOR_MOD_EXPR
:
8681 case ROUND_MOD_EXPR
:
8682 if (modifier
== EXPAND_STACK_PARM
)
8684 expand_operands (treeop0
, treeop1
,
8685 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8686 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
8688 case FIXED_CONVERT_EXPR
:
8689 op0
= expand_normal (treeop0
);
8690 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8691 target
= gen_reg_rtx (mode
);
8693 if ((TREE_CODE (TREE_TYPE (treeop0
)) == INTEGER_TYPE
8694 && TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8695 || (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
)))
8696 expand_fixed_convert (target
, op0
, 1, TYPE_SATURATING (type
));
8698 expand_fixed_convert (target
, op0
, 0, TYPE_SATURATING (type
));
8701 case FIX_TRUNC_EXPR
:
8702 op0
= expand_normal (treeop0
);
8703 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8704 target
= gen_reg_rtx (mode
);
8705 expand_fix (target
, op0
, unsignedp
);
8709 op0
= expand_normal (treeop0
);
8710 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8711 target
= gen_reg_rtx (mode
);
8712 /* expand_float can't figure out what to do if FROM has VOIDmode.
8713 So give it the correct mode. With -O, cse will optimize this. */
8714 if (GET_MODE (op0
) == VOIDmode
)
8715 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0
)),
8717 expand_float (target
, op0
,
8718 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8722 op0
= expand_expr (treeop0
, subtarget
,
8723 VOIDmode
, EXPAND_NORMAL
);
8724 if (modifier
== EXPAND_STACK_PARM
)
8726 temp
= expand_unop (mode
,
8727 optab_for_tree_code (NEGATE_EXPR
, type
,
8731 return REDUCE_BIT_FIELD (temp
);
8734 op0
= expand_expr (treeop0
, subtarget
,
8735 VOIDmode
, EXPAND_NORMAL
);
8736 if (modifier
== EXPAND_STACK_PARM
)
8739 /* ABS_EXPR is not valid for complex arguments. */
8740 gcc_assert (GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
8741 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
);
8743 /* Unsigned abs is simply the operand. Testing here means we don't
8744 risk generating incorrect code below. */
8745 if (TYPE_UNSIGNED (type
))
8748 return expand_abs (mode
, op0
, target
, unsignedp
,
8749 safe_from_p (target
, treeop0
, 1));
8753 target
= original_target
;
8755 || modifier
== EXPAND_STACK_PARM
8756 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
8757 || GET_MODE (target
) != mode
8759 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8760 target
= gen_reg_rtx (mode
);
8761 expand_operands (treeop0
, treeop1
,
8762 target
, &op0
, &op1
, EXPAND_NORMAL
);
8764 /* First try to do it with a special MIN or MAX instruction.
8765 If that does not win, use a conditional jump to select the proper
8767 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
8768 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8773 /* At this point, a MEM target is no longer useful; we will get better
8776 if (! REG_P (target
))
8777 target
= gen_reg_rtx (mode
);
8779 /* If op1 was placed in target, swap op0 and op1. */
8780 if (target
!= op0
&& target
== op1
)
8787 /* We generate better code and avoid problems with op1 mentioning
8788 target by forcing op1 into a pseudo if it isn't a constant. */
8789 if (! CONSTANT_P (op1
))
8790 op1
= force_reg (mode
, op1
);
8793 enum rtx_code comparison_code
;
8796 if (code
== MAX_EXPR
)
8797 comparison_code
= unsignedp
? GEU
: GE
;
8799 comparison_code
= unsignedp
? LEU
: LE
;
8801 /* Canonicalize to comparisons against 0. */
8802 if (op1
== const1_rtx
)
8804 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8805 or (a != 0 ? a : 1) for unsigned.
8806 For MIN we are safe converting (a <= 1 ? a : 1)
8807 into (a <= 0 ? a : 1) */
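	  /* Editor's illustrative note (not part of the original
	     source): e.g. for signed A, MAX_EXPR <A, 1> is emitted as
	     "A > 0 ? A : 1", so the comparison is against zero, which is
	     usually cheaper and often already available from the flags
	     of a preceding operation.  */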
8808 cmpop1
= const0_rtx
;
8809 if (code
== MAX_EXPR
)
8810 comparison_code
= unsignedp
? NE
: GT
;
8812 if (op1
== constm1_rtx
&& !unsignedp
)
8814 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8815 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8816 cmpop1
= const0_rtx
;
8817 if (code
== MIN_EXPR
)
8818 comparison_code
= LT
;
8820 #ifdef HAVE_conditional_move
8821 /* Use a conditional move if possible. */
8822 if (can_conditionally_move_p (mode
))
8828 /* Try to emit the conditional move. */
8829 insn
= emit_conditional_move (target
, comparison_code
,
8834 /* If we could do the conditional move, emit the sequence,
8838 rtx_insn
*seq
= get_insns ();
8844 /* Otherwise discard the sequence and fall back to code with
8850 emit_move_insn (target
, op0
);
8852 temp
= gen_label_rtx ();
8853 do_compare_rtx_and_jump (target
, cmpop1
, comparison_code
,
8854 unsignedp
, mode
, NULL_RTX
, NULL_RTX
, temp
,
8857 emit_move_insn (target
, op1
);
8862 op0
= expand_expr (treeop0
, subtarget
,
8863 VOIDmode
, EXPAND_NORMAL
);
8864 if (modifier
== EXPAND_STACK_PARM
)
      /* In case we have to reduce the result to bit-field precision
	 for an unsigned bit-field, expand this as XOR with a proper
	 constant instead.  */
8869 if (reduce_bit_field
&& TYPE_UNSIGNED (type
))
8871 wide_int mask
= wi::mask (TYPE_PRECISION (type
),
8872 false, GET_MODE_PRECISION (mode
));
8874 temp
= expand_binop (mode
, xor_optab
, op0
,
8875 immed_wide_int_const (mask
, mode
),
8876 target
, 1, OPTAB_LIB_WIDEN
);
8879 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
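      /* Editor's illustrative note (not part of the original source):
	 for an unsigned 3-bit bit-field the mask built above is 7, so
	 ~X is computed as X ^ 7; the result already lies within the
	 bit-field's precision and needs no further REDUCE_BIT_FIELD
	 masking.  */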
8883 /* ??? Can optimize bitwise operations with one arg constant.
8884 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8885 and (a bitwise1 b) bitwise2 b (etc)
8886 but that is probably not worth while. */
8895 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type
))
8896 || (GET_MODE_PRECISION (TYPE_MODE (type
))
8897 == TYPE_PRECISION (type
)));
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_shift" doesn't support sat/no-sat fixed-point
	 shifts.  */
8905 if (ALL_FIXED_POINT_MODE_P (mode
))
8908 if (! safe_from_p (subtarget
, treeop1
, 1))
8910 if (modifier
== EXPAND_STACK_PARM
)
8912 op0
= expand_expr (treeop0
, subtarget
,
8913 VOIDmode
, EXPAND_NORMAL
);
8914 temp
= expand_variable_shift (code
, mode
, op0
, treeop1
, target
,
8916 if (code
== LSHIFT_EXPR
)
8917 temp
= REDUCE_BIT_FIELD (temp
);
8920 /* Could determine the answer when only additive constants differ. Also,
8921 the addition of one can be handled by changing the condition. */
8928 case UNORDERED_EXPR
:
8936 temp
= do_store_flag (ops
,
8937 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8938 tmode
!= VOIDmode
? tmode
: mode
);
      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
8946 || modifier
== EXPAND_STACK_PARM
8947 || ! safe_from_p (target
, treeop0
, 1)
8948 || ! safe_from_p (target
, treeop1
, 1)
8949 /* Make sure we don't have a hard reg (such as function's return
8950 value) live across basic blocks, if not optimizing. */
8951 || (!optimize
&& REG_P (target
)
8952 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8953 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8955 emit_move_insn (target
, const0_rtx
);
8957 op1
= gen_label_rtx ();
8958 jumpifnot_1 (code
, treeop0
, treeop1
, op1
, -1);
8960 if (TYPE_PRECISION (type
) == 1 && !TYPE_UNSIGNED (type
))
8961 emit_move_insn (target
, constm1_rtx
);
8963 emit_move_insn (target
, const1_rtx
);
8969 /* Get the rtx code of the operands. */
8970 op0
= expand_normal (treeop0
);
8971 op1
= expand_normal (treeop1
);
8974 target
= gen_reg_rtx (TYPE_MODE (type
));
8976 /* If target overlaps with op1, then either we need to force
8977 op1 into a pseudo (if target also overlaps with op0),
8978 or write the complex parts in reverse order. */
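      /* Editor's illustrative note (not part of the original source):
	 suppose TARGET is the CONCAT (R, I) and we expand
	 COMPLEX_EXPR <X, R>.  Writing the real part first would clobber
	 R before it is read as the imaginary value, so either R is
	 copied to a fresh pseudo first or the parts are written in
	 imaginary-then-real order as done below.  */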
8979 switch (GET_CODE (target
))
8982 if (reg_overlap_mentioned_p (XEXP (target
, 0), op1
))
8984 if (reg_overlap_mentioned_p (XEXP (target
, 1), op0
))
8986 complex_expr_force_op1
:
8987 temp
= gen_reg_rtx (GET_MODE_INNER (GET_MODE (target
)));
8988 emit_move_insn (temp
, op1
);
8992 complex_expr_swap_order
:
8993 /* Move the imaginary (op1) and real (op0) parts to their
8995 write_complex_part (target
, op1
, true);
8996 write_complex_part (target
, op0
, false);
9002 temp
= adjust_address_nv (target
,
9003 GET_MODE_INNER (GET_MODE (target
)), 0);
9004 if (reg_overlap_mentioned_p (temp
, op1
))
9006 enum machine_mode imode
= GET_MODE_INNER (GET_MODE (target
));
9007 temp
= adjust_address_nv (target
, imode
,
9008 GET_MODE_SIZE (imode
));
9009 if (reg_overlap_mentioned_p (temp
, op0
))
9010 goto complex_expr_force_op1
;
9011 goto complex_expr_swap_order
;
9015 if (reg_overlap_mentioned_p (target
, op1
))
9017 if (reg_overlap_mentioned_p (target
, op0
))
9018 goto complex_expr_force_op1
;
9019 goto complex_expr_swap_order
;
9024 /* Move the real (op0) and imaginary (op1) parts to their location. */
9025 write_complex_part (target
, op0
, false);
9026 write_complex_part (target
, op1
, true);
9030 case WIDEN_SUM_EXPR
:
9032 tree oprnd0
= treeop0
;
9033 tree oprnd1
= treeop1
;
9035 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9036 target
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, op1
,
9041 case REDUC_MAX_EXPR
:
9042 case REDUC_MIN_EXPR
:
9043 case REDUC_PLUS_EXPR
:
9045 op0
= expand_normal (treeop0
);
9046 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9047 temp
= expand_unop (mode
, this_optab
, op0
, target
, unsignedp
);
9052 case VEC_LSHIFT_EXPR
:
9053 case VEC_RSHIFT_EXPR
:
9055 target
= expand_vec_shift_expr (ops
, target
);
9059 case VEC_UNPACK_HI_EXPR
:
9060 case VEC_UNPACK_LO_EXPR
:
9062 op0
= expand_normal (treeop0
);
9063 temp
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, NULL_RTX
,
9069 case VEC_UNPACK_FLOAT_HI_EXPR
:
9070 case VEC_UNPACK_FLOAT_LO_EXPR
:
9072 op0
= expand_normal (treeop0
);
9073 /* The signedness is determined from input operand. */
9074 temp
= expand_widen_pattern_expr
9075 (ops
, op0
, NULL_RTX
, NULL_RTX
,
9076 target
, TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
9082 case VEC_WIDEN_MULT_HI_EXPR
:
9083 case VEC_WIDEN_MULT_LO_EXPR
:
9084 case VEC_WIDEN_MULT_EVEN_EXPR
:
9085 case VEC_WIDEN_MULT_ODD_EXPR
:
9086 case VEC_WIDEN_LSHIFT_HI_EXPR
:
9087 case VEC_WIDEN_LSHIFT_LO_EXPR
:
9088 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9089 target
= expand_widen_pattern_expr (ops
, op0
, op1
, NULL_RTX
,
9091 gcc_assert (target
);
9094 case VEC_PACK_TRUNC_EXPR
:
9095 case VEC_PACK_SAT_EXPR
:
9096 case VEC_PACK_FIX_TRUNC_EXPR
:
9097 mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9101 expand_operands (treeop0
, treeop1
, target
, &op0
, &op1
, EXPAND_NORMAL
);
9102 op2
= expand_normal (treeop2
);
9104 /* Careful here: if the target doesn't support integral vector modes,
9105 a constant selection vector could wind up smooshed into a normal
9106 integral constant. */
9107 if (CONSTANT_P (op2
) && GET_CODE (op2
) != CONST_VECTOR
)
9109 tree sel_type
= TREE_TYPE (treeop2
);
9110 enum machine_mode vmode
9111 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type
)),
9112 TYPE_VECTOR_SUBPARTS (sel_type
));
9113 gcc_assert (GET_MODE_CLASS (vmode
) == MODE_VECTOR_INT
);
9114 op2
= simplify_subreg (vmode
, op2
, TYPE_MODE (sel_type
), 0);
9115 gcc_assert (op2
&& GET_CODE (op2
) == CONST_VECTOR
);
9118 gcc_assert (GET_MODE_CLASS (GET_MODE (op2
)) == MODE_VECTOR_INT
);
9120 temp
= expand_vec_perm (mode
, op0
, op1
, op2
, target
);
9126 tree oprnd0
= treeop0
;
9127 tree oprnd1
= treeop1
;
9128 tree oprnd2
= treeop2
;
9131 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9132 op2
= expand_normal (oprnd2
);
9133 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9140 tree oprnd0
= treeop0
;
9141 tree oprnd1
= treeop1
;
9142 tree oprnd2
= treeop2
;
9145 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9146 op2
= expand_normal (oprnd2
);
9147 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9152 case REALIGN_LOAD_EXPR
:
9154 tree oprnd0
= treeop0
;
9155 tree oprnd1
= treeop1
;
9156 tree oprnd2
= treeop2
;
9159 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9160 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9161 op2
= expand_normal (oprnd2
);
9162 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
9169 /* A COND_EXPR with its type being VOID_TYPE represents a
9170 conditional jump and is handled in
9171 expand_gimple_cond_expr. */
9172 gcc_assert (!VOID_TYPE_P (type
));
9174 /* Note that COND_EXPRs whose type is a structure or union
9175 are required to be constructed to contain assignments of
9176 a temporary variable, so that we can evaluate them here
9177 for side effect only. If type is void, we must do likewise. */
9179 gcc_assert (!TREE_ADDRESSABLE (type
)
9181 && TREE_TYPE (treeop1
) != void_type_node
9182 && TREE_TYPE (treeop2
) != void_type_node
);
9184 temp
= expand_cond_expr_using_cmove (treeop0
, treeop1
, treeop2
);
9188 /* If we are not to produce a result, we have no target. Otherwise,
9189 if a target was specified use it; it will not be used as an
9190 intermediate target unless it is safe. If no target, use a
9193 if (modifier
!= EXPAND_STACK_PARM
9195 && safe_from_p (original_target
, treeop0
, 1)
9196 && GET_MODE (original_target
) == mode
9197 && !MEM_P (original_target
))
9198 temp
= original_target
;
9200 temp
= assign_temp (type
, 0, 1);
9202 do_pending_stack_adjust ();
9204 op0
= gen_label_rtx ();
9205 op1
= gen_label_rtx ();
9206 jumpifnot (treeop0
, op0
, -1);
9207 store_expr (treeop1
, temp
,
9208 modifier
== EXPAND_STACK_PARM
,
9211 emit_jump_insn (gen_jump (op1
));
9214 store_expr (treeop2
, temp
,
9215 modifier
== EXPAND_STACK_PARM
,
9223 target
= expand_vec_cond_expr (type
, treeop0
, treeop1
, treeop2
, target
);
9230 /* Here to do an ordinary binary operator. */
9232 expand_operands (treeop0
, treeop1
,
9233 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9235 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9237 if (modifier
== EXPAND_STACK_PARM
)
9239 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9240 unsignedp
, OPTAB_LIB_WIDEN
);
  /* Bitwise operations do not need bit-field reduction as we expect their
     operands to be properly truncated.  */
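  /* Editor's illustrative note (not part of the original source): if A
     and B both fit in an N-bit bit-field type, then A & B, A | B and
     A ^ B cannot set any bit at or above position N, so masking the
     result again would be redundant.  */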
9244 if (code
== BIT_XOR_EXPR
9245 || code
== BIT_AND_EXPR
9246 || code
== BIT_IOR_EXPR
)
9248 return REDUCE_BIT_FIELD (temp
);
9250 #undef REDUCE_BIT_FIELD
9253 /* Return TRUE if expression STMT is suitable for replacement.
9254 Never consider memory loads as replaceable, because those don't ever lead
9255 into constant expressions. */
9258 stmt_is_replaceable_p (gimple stmt
)
9260 if (ssa_is_replaceable_p (stmt
))
9262 /* Don't move around loads. */
9263 if (!gimple_assign_single_p (stmt
)
9264 || is_gimple_val (gimple_assign_rhs1 (stmt
)))
rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
      default:
      case 3: treeop2 = TREE_OPERAND (exp, 2);
      case 2: treeop1 = TREE_OPERAND (exp, 1);
      case 1: treeop0 = TREE_OPERAND (exp, 0);
      case 0: break;
      }
9311 ignore
= (target
== const0_rtx
9312 || ((CONVERT_EXPR_CODE_P (code
)
9313 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
9314 && TREE_CODE (type
) == VOID_TYPE
));
9316 /* An operation in what may be a bit-field type needs the
9317 result to be reduced to the precision of the bit-field type,
9318 which is narrower than that of the type's mode. */
9319 reduce_bit_field
= (!ignore
9320 && INTEGRAL_TYPE_P (type
)
9321 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
9323 /* If we are going to ignore this result, we need only do something
9324 if there is a side-effect somewhere in the expression. If there
9325 is, short-circuit the most common cases here. Note that we must
9326 not call expand_expr with anything but const0_rtx in case this
9327 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9331 if (! TREE_SIDE_EFFECTS (exp
))
9334 /* Ensure we reference a volatile object even if value is ignored, but
9335 don't do this if all we are doing is taking its address. */
9336 if (TREE_THIS_VOLATILE (exp
)
9337 && TREE_CODE (exp
) != FUNCTION_DECL
9338 && mode
!= VOIDmode
&& mode
!= BLKmode
9339 && modifier
!= EXPAND_CONST_ADDRESS
)
9341 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
9347 if (TREE_CODE_CLASS (code
) == tcc_unary
9348 || code
== BIT_FIELD_REF
9349 || code
== COMPONENT_REF
9350 || code
== INDIRECT_REF
)
9351 return expand_expr (treeop0
, const0_rtx
, VOIDmode
,
9354 else if (TREE_CODE_CLASS (code
) == tcc_binary
9355 || TREE_CODE_CLASS (code
) == tcc_comparison
9356 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
9358 expand_expr (treeop0
, const0_rtx
, VOIDmode
, modifier
);
9359 expand_expr (treeop1
, const0_rtx
, VOIDmode
, modifier
);
9366 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
9369 /* Use subtarget as the target for operand 0 of a binary operation. */
9370 subtarget
= get_subtarget (target
);
9371 original_target
= target
;
9377 tree function
= decl_function_context (exp
);
9379 temp
= label_rtx (exp
);
9380 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
9382 if (function
!= current_function_decl
9384 LABEL_REF_NONLOCAL_P (temp
) = 1;
9386 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
      /* ??? ivopts calls the expander without any preparation from
	 out-of-ssa.  So fake instructions as if this were an access to
	 the base variable.  This unnecessarily allocates a pseudo; see
	 how we can reuse it, if partition base vars have it set
	 already.  */
9395 if (!currently_expanding_to_rtl
)
9397 tree var
= SSA_NAME_VAR (exp
);
9398 if (var
&& DECL_RTL_SET_P (var
))
9399 return DECL_RTL (var
);
9400 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp
)),
9401 LAST_VIRTUAL_REGISTER
+ 1);
9404 g
= get_gimple_for_ssa_name (exp
);
9405 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9407 && modifier
== EXPAND_INITIALIZER
9408 && !SSA_NAME_IS_DEFAULT_DEF (exp
)
9409 && (optimize
|| DECL_IGNORED_P (SSA_NAME_VAR (exp
)))
9410 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp
)))
9411 g
= SSA_NAME_DEF_STMT (exp
);
9415 ops
.code
= gimple_assign_rhs_code (g
);
9416 switch (get_gimple_rhs_class (ops
.code
))
9418 case GIMPLE_TERNARY_RHS
:
9419 ops
.op2
= gimple_assign_rhs3 (g
);
9421 case GIMPLE_BINARY_RHS
:
9422 ops
.op1
= gimple_assign_rhs2 (g
);
9424 case GIMPLE_UNARY_RHS
:
9425 ops
.op0
= gimple_assign_rhs1 (g
);
9426 ops
.type
= TREE_TYPE (gimple_assign_lhs (g
));
9427 ops
.location
= gimple_location (g
);
9428 r
= expand_expr_real_2 (&ops
, target
, tmode
, modifier
);
9430 case GIMPLE_SINGLE_RHS
:
9432 location_t saved_loc
= curr_insn_location ();
9433 set_curr_insn_location (gimple_location (g
));
9434 r
= expand_expr_real (gimple_assign_rhs1 (g
), target
,
9435 tmode
, modifier
, NULL
, inner_reference_p
);
9436 set_curr_insn_location (saved_loc
);
9442 if (REG_P (r
) && !REG_EXPR (r
))
9443 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp
), r
);
9448 decl_rtl
= get_rtx_for_ssa_name (ssa_name
);
9449 exp
= SSA_NAME_VAR (ssa_name
);
9450 goto expand_decl_rtl
;
9454 /* If a static var's type was incomplete when the decl was written,
9455 but the type is complete now, lay out the decl now. */
9456 if (DECL_SIZE (exp
) == 0
9457 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
9458 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
9459 layout_decl (exp
, 0);
9461 /* ... fall through ... */
9465 decl_rtl
= DECL_RTL (exp
);
9467 gcc_assert (decl_rtl
);
9468 decl_rtl
= copy_rtx (decl_rtl
);
9469 /* Record writes to register variables. */
9470 if (modifier
== EXPAND_WRITE
9472 && HARD_REGISTER_P (decl_rtl
))
9473 add_to_hard_reg_set (&crtl
->asm_clobbers
,
9474 GET_MODE (decl_rtl
), REGNO (decl_rtl
));
    /* Ensure the variable is marked as used even if it doesn't go through
       a parser.  If it hasn't been used yet, write out an external
       definition.  */
9479 TREE_USED (exp
) = 1;
9481 /* Show we haven't gotten RTL for this yet. */
9484 /* Variables inherited from containing functions should have
9485 been lowered by this point. */
9486 context
= decl_function_context (exp
);
9487 gcc_assert (SCOPE_FILE_SCOPE_P (context
)
9488 || context
== current_function_decl
9489 || TREE_STATIC (exp
)
9490 || DECL_EXTERNAL (exp
)
9491 /* ??? C++ creates functions that are not TREE_STATIC. */
9492 || TREE_CODE (exp
) == FUNCTION_DECL
);
9494 /* This is the case of an array whose size is to be determined
9495 from its initializer, while the initializer is still being parsed.
9496 ??? We aren't parsing while expanding anymore. */
9498 if (MEM_P (decl_rtl
) && REG_P (XEXP (decl_rtl
, 0)))
9499 temp
= validize_mem (decl_rtl
);
9501 /* If DECL_RTL is memory, we are in the normal case and the
9502 address is not valid, get the address into a register. */
9504 else if (MEM_P (decl_rtl
) && modifier
!= EXPAND_INITIALIZER
)
9507 *alt_rtl
= decl_rtl
;
9508 decl_rtl
= use_anchored_address (decl_rtl
);
9509 if (modifier
!= EXPAND_CONST_ADDRESS
9510 && modifier
!= EXPAND_SUM
9511 && !memory_address_addr_space_p (DECL_MODE (exp
),
9513 MEM_ADDR_SPACE (decl_rtl
)))
9514 temp
= replace_equiv_address (decl_rtl
,
9515 copy_rtx (XEXP (decl_rtl
, 0)));
9518 /* If we got something, return it. But first, set the alignment
9519 if the address is a register. */
9522 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
9523 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
9528 /* If the mode of DECL_RTL does not match that of the decl,
9529 there are two cases: we are dealing with a BLKmode value
9530 that is returned in a register, or we are dealing with
9531 a promoted value. In the latter case, return a SUBREG
9532 of the wanted mode, but mark it so that we know that it
9533 was already extended. */
9534 if (REG_P (decl_rtl
)
9535 && DECL_MODE (exp
) != BLKmode
9536 && GET_MODE (decl_rtl
) != DECL_MODE (exp
))
9538 enum machine_mode pmode
;
9540 /* Get the signedness to be used for this variable. Ensure we get
9541 the same mode we got when the variable was declared. */
9542 if (code
== SSA_NAME
9543 && (g
= SSA_NAME_DEF_STMT (ssa_name
))
9544 && gimple_code (g
) == GIMPLE_CALL
9545 && !gimple_call_internal_p (g
))
9546 pmode
= promote_function_mode (type
, mode
, &unsignedp
,
9547 gimple_call_fntype (g
),
9550 pmode
= promote_decl_mode (exp
, &unsignedp
);
9551 gcc_assert (GET_MODE (decl_rtl
) == pmode
);
9553 temp
= gen_lowpart_SUBREG (mode
, decl_rtl
);
9554 SUBREG_PROMOTED_VAR_P (temp
) = 1;
9555 SUBREG_PROMOTED_SET (temp
, unsignedp
);
9562 /* Given that TYPE_PRECISION (type) is not always equal to
9563 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9564 the former to the latter according to the signedness of the
9566 temp
= immed_wide_int_const (wide_int::from
9568 GET_MODE_PRECISION (TYPE_MODE (type
)),
9575 tree tmp
= NULL_TREE
;
9576 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
9577 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
9578 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
9579 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
9580 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
9581 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
9582 return const_vector_from_tree (exp
);
9583 if (GET_MODE_CLASS (mode
) == MODE_INT
)
9585 tree type_for_mode
= lang_hooks
.types
.type_for_mode (mode
, 1);
9587 tmp
= fold_unary_loc (loc
, VIEW_CONVERT_EXPR
, type_for_mode
, exp
);
9591 vec
<constructor_elt
, va_gc
> *v
;
9593 vec_alloc (v
, VECTOR_CST_NELTS (exp
));
9594 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
9595 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, VECTOR_CST_ELT (exp
, i
));
9596 tmp
= build_constructor (type
, v
);
9598 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
9603 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
9606 /* If optimized, generate immediate CONST_DOUBLE
9607 which will be turned into memory by reload if necessary.
9609 We used to force a register so that loop.c could see it. But
9610 this does not allow gen_* patterns to perform optimizations with
9611 the constants. It also produces two insns in cases like "x = 1.0;".
9612 On most machines, floating-point constants are not permitted in
9613 many insns, so we'd end up copying it to a register in any case.
9615 Now, we do the copying in expand_binop, if appropriate. */
9616 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
9617 TYPE_MODE (TREE_TYPE (exp
)));
9620 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
9621 TYPE_MODE (TREE_TYPE (exp
)));
9624 /* Handle evaluating a complex constant in a CONCAT target. */
9625 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
9627 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9630 rtarg
= XEXP (original_target
, 0);
9631 itarg
= XEXP (original_target
, 1);
9633 /* Move the real and imaginary parts separately. */
9634 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
9635 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
9638 emit_move_insn (rtarg
, op0
);
9640 emit_move_insn (itarg
, op1
);
9642 return original_target
;
9645 /* ... fall through ... */
9648 temp
= expand_expr_constant (exp
, 1, modifier
);
9650 /* temp contains a constant address.
9651 On RISC machines where a constant address isn't valid,
9652 make some insns to get that address into a register. */
9653 if (modifier
!= EXPAND_CONST_ADDRESS
9654 && modifier
!= EXPAND_INITIALIZER
9655 && modifier
!= EXPAND_SUM
9656 && ! memory_address_addr_space_p (mode
, XEXP (temp
, 0),
9657 MEM_ADDR_SPACE (temp
)))
9658 return replace_equiv_address (temp
,
9659 copy_rtx (XEXP (temp
, 0)));
9665 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
,
9668 if (!SAVE_EXPR_RESOLVED_P (exp
))
9670 /* We can indeed still hit this case, typically via builtin
9671 expanders calling save_expr immediately before expanding
9672 something. Assume this means that we only have to deal
9673 with non-BLKmode values. */
9674 gcc_assert (GET_MODE (ret
) != BLKmode
);
9676 val
= build_decl (curr_insn_location (),
9677 VAR_DECL
, NULL
, TREE_TYPE (exp
));
9678 DECL_ARTIFICIAL (val
) = 1;
9679 DECL_IGNORED_P (val
) = 1;
9681 TREE_OPERAND (exp
, 0) = treeop0
;
9682 SAVE_EXPR_RESOLVED_P (exp
) = 1;
9684 if (!CONSTANT_P (ret
))
9685 ret
= copy_to_reg (ret
);
9686 SET_DECL_RTL (val
, ret
);
9694 /* If we don't need the result, just ensure we evaluate any
9698 unsigned HOST_WIDE_INT idx
;
9701 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
9702 expand_expr (value
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
9707 return expand_constructor (exp
, target
, modifier
, false);
9709 case TARGET_MEM_REF
:
9712 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
9713 enum insn_code icode
;
9716 op0
= addr_for_mem_ref (exp
, as
, true);
9717 op0
= memory_address_addr_space (mode
, op0
, as
);
9718 temp
= gen_rtx_MEM (mode
, op0
);
9719 set_mem_attributes (temp
, exp
, 0);
9720 set_mem_addr_space (temp
, as
);
9721 align
= get_object_alignment (exp
);
9722 if (modifier
!= EXPAND_WRITE
9723 && modifier
!= EXPAND_MEMORY
9725 && align
< GET_MODE_ALIGNMENT (mode
)
9726 /* If the target does not have special handling for unaligned
9727 loads of mode then it can use regular moves for them. */
9728 && ((icode
= optab_handler (movmisalign_optab
, mode
))
9729 != CODE_FOR_nothing
))
9731 struct expand_operand ops
[2];
9733 /* We've already validated the memory, and we're creating a
9734 new pseudo destination. The predicates really can't fail,
9735 nor can the generator. */
9736 create_output_operand (&ops
[0], NULL_RTX
, mode
);
9737 create_fixed_operand (&ops
[1], temp
);
9738 expand_insn (icode
, 2, ops
);
9739 temp
= ops
[0].value
;
9747 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
9748 enum machine_mode address_mode
;
9749 tree base
= TREE_OPERAND (exp
, 0);
9751 enum insn_code icode
;
9753 /* Handle expansion of non-aliased memory with non-BLKmode. That
9754 might end up in a register. */
9755 if (mem_ref_refers_to_non_mem_p (exp
))
9757 HOST_WIDE_INT offset
= mem_ref_offset (exp
).to_short_addr ();
9758 base
= TREE_OPERAND (base
, 0);
9760 && tree_fits_uhwi_p (TYPE_SIZE (type
))
9761 && (GET_MODE_BITSIZE (DECL_MODE (base
))
9762 == tree_to_uhwi (TYPE_SIZE (type
))))
9763 return expand_expr (build1 (VIEW_CONVERT_EXPR
, type
, base
),
9764 target
, tmode
, modifier
);
9765 if (TYPE_MODE (type
) == BLKmode
)
9767 temp
= assign_stack_temp (DECL_MODE (base
),
9768 GET_MODE_SIZE (DECL_MODE (base
)));
9769 store_expr (base
, temp
, 0, false);
9770 temp
= adjust_address (temp
, BLKmode
, offset
);
9771 set_mem_size (temp
, int_size_in_bytes (type
));
9774 exp
= build3 (BIT_FIELD_REF
, type
, base
, TYPE_SIZE (type
),
9775 bitsize_int (offset
* BITS_PER_UNIT
));
9776 return expand_expr (exp
, target
, tmode
, modifier
);
9778 address_mode
= targetm
.addr_space
.address_mode (as
);
9779 base
= TREE_OPERAND (exp
, 0);
9780 if ((def_stmt
= get_def_for_expr (base
, BIT_AND_EXPR
)))
9782 tree mask
= gimple_assign_rhs2 (def_stmt
);
9783 base
= build2 (BIT_AND_EXPR
, TREE_TYPE (base
),
9784 gimple_assign_rhs1 (def_stmt
), mask
);
9785 TREE_OPERAND (exp
, 0) = base
;
9787 align
= get_object_alignment (exp
);
9788 op0
= expand_expr (base
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
9789 op0
= memory_address_addr_space (mode
, op0
, as
);
9790 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
9792 rtx off
= immed_wide_int_const (mem_ref_offset (exp
), address_mode
);
9793 op0
= simplify_gen_binary (PLUS
, address_mode
, op0
, off
);
9794 op0
= memory_address_addr_space (mode
, op0
, as
);
9796 temp
= gen_rtx_MEM (mode
, op0
);
9797 set_mem_attributes (temp
, exp
, 0);
9798 set_mem_addr_space (temp
, as
);
9799 if (TREE_THIS_VOLATILE (exp
))
9800 MEM_VOLATILE_P (temp
) = 1;
9801 if (modifier
!= EXPAND_WRITE
9802 && modifier
!= EXPAND_MEMORY
9803 && !inner_reference_p
9805 && align
< GET_MODE_ALIGNMENT (mode
))
9807 if ((icode
= optab_handler (movmisalign_optab
, mode
))
9808 != CODE_FOR_nothing
)
9810 struct expand_operand ops
[2];
9812 /* We've already validated the memory, and we're creating a
9813 new pseudo destination. The predicates really can't fail,
9814 nor can the generator. */
9815 create_output_operand (&ops
[0], NULL_RTX
, mode
);
9816 create_fixed_operand (&ops
[1], temp
);
9817 expand_insn (icode
, 2, ops
);
9818 temp
= ops
[0].value
;
9820 else if (SLOW_UNALIGNED_ACCESS (mode
, align
))
9821 temp
= extract_bit_field (temp
, GET_MODE_BITSIZE (mode
),
9822 0, TYPE_UNSIGNED (TREE_TYPE (exp
)),
9823 (modifier
== EXPAND_STACK_PARM
9824 ? NULL_RTX
: target
),
9833 tree array
= treeop0
;
9834 tree index
= treeop1
;
9837 /* Fold an expression like: "foo"[2].
9838 This is not done in fold so it won't happen inside &.
9839 Don't fold if this is for wide characters since it's too
9840 difficult to do correctly and this is a very rare case. */
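	  /* Editor's illustrative note (not part of the original
	     source): fold_read_from_constant_string turns an index into
	     a string literal, e.g. "foo"[2], directly into the character
	     constant 'o', so no memory reference needs to be emitted for
	     it.  */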
9842 if (modifier
!= EXPAND_CONST_ADDRESS
9843 && modifier
!= EXPAND_INITIALIZER
9844 && modifier
!= EXPAND_MEMORY
)
9846 tree t
= fold_read_from_constant_string (exp
);
9849 return expand_expr (t
, target
, tmode
, modifier
);
9852 /* If this is a constant index into a constant array,
9853 just get the value from the array. Handle both the cases when
9854 we have an explicit constructor and when our operand is a variable
9855 that was declared const. */
9857 if (modifier
!= EXPAND_CONST_ADDRESS
9858 && modifier
!= EXPAND_INITIALIZER
9859 && modifier
!= EXPAND_MEMORY
9860 && TREE_CODE (array
) == CONSTRUCTOR
9861 && ! TREE_SIDE_EFFECTS (array
)
9862 && TREE_CODE (index
) == INTEGER_CST
)
9864 unsigned HOST_WIDE_INT ix
;
9867 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
9869 if (tree_int_cst_equal (field
, index
))
9871 if (!TREE_SIDE_EFFECTS (value
))
9872 return expand_expr (fold (value
), target
, tmode
, modifier
);
9877 else if (optimize
>= 1
9878 && modifier
!= EXPAND_CONST_ADDRESS
9879 && modifier
!= EXPAND_INITIALIZER
9880 && modifier
!= EXPAND_MEMORY
9881 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
9882 && TREE_CODE (index
) == INTEGER_CST
9883 && (TREE_CODE (array
) == VAR_DECL
9884 || TREE_CODE (array
) == CONST_DECL
)
9885 && (init
= ctor_for_folding (array
)) != error_mark_node
)
9887 if (init
== NULL_TREE
)
9889 tree value
= build_zero_cst (type
);
9890 if (TREE_CODE (value
) == CONSTRUCTOR
)
9892 /* If VALUE is a CONSTRUCTOR, this optimization is only
9893 useful if this doesn't store the CONSTRUCTOR into
9894 memory. If it does, it is more efficient to just
9895 load the data from the array directly. */
9896 rtx ret
= expand_constructor (value
, target
,
9898 if (ret
== NULL_RTX
)
9903 return expand_expr (value
, target
, tmode
, modifier
);
9905 else if (TREE_CODE (init
) == CONSTRUCTOR
)
9907 unsigned HOST_WIDE_INT ix
;
9910 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
9912 if (tree_int_cst_equal (field
, index
))
9914 if (TREE_SIDE_EFFECTS (value
))
9917 if (TREE_CODE (value
) == CONSTRUCTOR
)
9919 /* If VALUE is a CONSTRUCTOR, this
9920 optimization is only useful if
9921 this doesn't store the CONSTRUCTOR
9922 into memory. If it does, it is more
9923 efficient to just load the data from
9924 the array directly. */
9925 rtx ret
= expand_constructor (value
, target
,
9927 if (ret
== NULL_RTX
)
9932 expand_expr (fold (value
), target
, tmode
, modifier
);
9935 else if (TREE_CODE (init
) == STRING_CST
)
9937 tree low_bound
= array_ref_low_bound (exp
);
9938 tree index1
= fold_convert_loc (loc
, sizetype
, treeop1
);
	      /* Optimize the special case of a zero lower bound.

		 We convert the lower bound to sizetype to avoid problems
		 with constant folding.  E.g. suppose the lower bound is
		 1 and its mode is QI.  Without the conversion
		    (ARRAY + (INDEX - (unsigned char)1))
		 becomes
		    (ARRAY + (-(unsigned char)1) + INDEX)
		 which becomes
		    (ARRAY + 255 + INDEX).  Oops!  */
9950 if (!integer_zerop (low_bound
))
9951 index1
= size_diffop_loc (loc
, index1
,
9952 fold_convert_loc (loc
, sizetype
,
9955 if (compare_tree_int (index1
, TREE_STRING_LENGTH (init
)) < 0)
9957 tree type
= TREE_TYPE (TREE_TYPE (init
));
9958 enum machine_mode mode
= TYPE_MODE (type
);
9960 if (GET_MODE_CLASS (mode
) == MODE_INT
9961 && GET_MODE_SIZE (mode
) == 1)
9962 return gen_int_mode (TREE_STRING_POINTER (init
)
9963 [TREE_INT_CST_LOW (index1
)],
9969 goto normal_inner_ref
;
9972 /* If the operand is a CONSTRUCTOR, we can just extract the
9973 appropriate field if it is present. */
9974 if (TREE_CODE (treeop0
) == CONSTRUCTOR
)
9976 unsigned HOST_WIDE_INT idx
;
9979 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0
),
9981 if (field
== treeop1
9982 /* We can normally use the value of the field in the
9983 CONSTRUCTOR. However, if this is a bitfield in
9984 an integral mode that we can fit in a HOST_WIDE_INT,
9985 we must mask only the number of bits in the bitfield,
9986 since this is done implicitly by the constructor. If
9987 the bitfield does not meet either of those conditions,
9988 we can't do this optimization. */
9989 && (! DECL_BIT_FIELD (field
)
9990 || ((GET_MODE_CLASS (DECL_MODE (field
)) == MODE_INT
)
9991 && (GET_MODE_PRECISION (DECL_MODE (field
))
9992 <= HOST_BITS_PER_WIDE_INT
))))
9994 if (DECL_BIT_FIELD (field
)
9995 && modifier
== EXPAND_STACK_PARM
)
9997 op0
= expand_expr (value
, target
, tmode
, modifier
);
9998 if (DECL_BIT_FIELD (field
))
10000 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
10001 enum machine_mode imode
= TYPE_MODE (TREE_TYPE (field
));
10003 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
10005 op1
= gen_int_mode (((HOST_WIDE_INT
) 1 << bitsize
) - 1,
10007 op0
= expand_and (imode
, op0
, op1
, target
);
10011 int count
= GET_MODE_PRECISION (imode
) - bitsize
;
10013 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
10015 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
10023 goto normal_inner_ref
;
10025 case BIT_FIELD_REF
:
10026 case ARRAY_RANGE_REF
:
10029 enum machine_mode mode1
, mode2
;
10030 HOST_WIDE_INT bitsize
, bitpos
;
10032 int volatilep
= 0, must_force_mem
;
10033 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
10034 &mode1
, &unsignedp
, &volatilep
, true);
10035 rtx orig_op0
, memloc
;
10036 bool mem_attrs_from_type
= false;
10038 /* If we got back the original object, something is wrong. Perhaps
10039 we are evaluating an expression too early. In any event, don't
10040 infinitely recurse. */
10041 gcc_assert (tem
!= exp
);
10043 /* If TEM's type is a union of variable size, pass TARGET to the inner
10044 computation, since it will need a temporary and TARGET is known
10045 to have to do. This occurs in unchecked conversion in Ada. */
10047 = expand_expr_real (tem
,
10048 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10049 && COMPLETE_TYPE_P (TREE_TYPE (tem
))
10050 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10052 && modifier
!= EXPAND_STACK_PARM
10053 ? target
: NULL_RTX
),
10055 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10058 /* If the field has a mode, we want to access it in the
10059 field's mode, not the computed mode.
10060 If a MEM has VOIDmode (external with incomplete type),
10061 use BLKmode for it instead. */
10064 if (mode1
!= VOIDmode
)
10065 op0
= adjust_address (op0
, mode1
, 0);
10066 else if (GET_MODE (op0
) == VOIDmode
)
10067 op0
= adjust_address (op0
, BLKmode
, 0);
10071 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
10073 /* If we have either an offset, a BLKmode result, or a reference
10074 outside the underlying object, we must force it to memory.
10075 Such a case can occur in Ada if we have unchecked conversion
10076 of an expression from a scalar type to an aggregate type or
10077 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10078 passed a partially uninitialized object or a view-conversion
10079 to a larger size. */
10080 must_force_mem
= (offset
10081 || mode1
== BLKmode
10082 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));

        /* Handle CONCAT first.  */
        if (GET_CODE (op0) == CONCAT && !must_force_mem)
          {
            if (bitpos == 0
                && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
              return op0;
            if (bitpos == 0
                && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
                && bitsize)
              {
                op0 = XEXP (op0, 0);
                mode2 = GET_MODE (op0);
              }
            else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
                     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
                     && bitpos
                     && bitsize)
              {
                op0 = XEXP (op0, 1);
                bitpos = 0;
                mode2 = GET_MODE (op0);
              }
            else
              /* Otherwise force into memory.  */
              must_force_mem = 1;
          }

        /* If this is a constant, put it in a register if it is a legitimate
           constant and we don't need a memory reference.  */
        if (CONSTANT_P (op0)
            && mode2 != BLKmode
            && targetm.legitimate_constant_p (mode2, op0)
            && !must_force_mem)
          op0 = force_reg (mode2, op0);

        /* Otherwise, if this is a constant, try to force it to the constant
           pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
           is a legitimate constant.  */
        else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
          op0 = validize_mem (memloc);

        /* Otherwise, if this is a constant or the object is not in memory
           and needs to be, put it there.  */
        else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
          {
            memloc = assign_temp (TREE_TYPE (tem), 1, 1);
            emit_move_insn (memloc, op0);
            op0 = memloc;
            mem_attrs_from_type = true;
          }

        if (offset)
          {
            enum machine_mode address_mode;
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
                                          EXPAND_SUM);

            gcc_assert (MEM_P (op0));

            address_mode = get_address_mode (op0);
            if (GET_MODE (offset_rtx) != address_mode)
              offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

            /* See the comment in expand_assignment for the rationale.  */
            if (mode1 != VOIDmode
                && bitpos != 0
                && bitsize > 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
              {
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }

        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
           record its alignment as BIGGEST_ALIGNMENT.  */
        if (MEM_P (op0) && bitpos == 0 && offset != 0
            && is_aligning_offset (offset, tem))
          set_mem_align (op0, BIGGEST_ALIGNMENT);

        /* Don't forget about volatility even if this is a bitfield.  */
        if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || REG_P (op0) || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER
                && modifier != EXPAND_MEMORY)
            /* If the bitfield is volatile and the bitsize
               is narrower than the access size of the bitfield,
               we need to extract bitfields from the access.  */
            || (volatilep && TREE_CODE (exp) == COMPONENT_REF
                && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
                && mode1 != BLKmode
                && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
                      || (MEM_P (op0)
                          && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
                              || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
                     && modifier != EXPAND_MEMORY
                     && ((modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_INITIALIZER)
                         ? STRICT_ALIGNMENT
                         : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
                    || (bitpos % BITS_PER_UNIT != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (bitsize >= 0
                && TYPE_SIZE (TREE_TYPE (exp))
                && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                          bitsize)))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && MEM_P (op0)
                      && MEM_P (target)
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                if (target == 0)
                  target = assign_temp (type, 1, 1);

                /* ??? Unlike the similar test a few lines below, this one is
                   very likely obsolete.  */
                if (bitsize == 0)
                  return target;

                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                gcc_assert (MEM_P (op0)
                            && (!target || MEM_P (target))
                            && !(bitpos % BITS_PER_UNIT));

                emit_block_move (target,
                                 adjust_address (op0, VOIDmode,
                                                 bitpos / BITS_PER_UNIT),
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

                return target;
              }

            /* If we have nothing to extract, the result will be 0 for targets
               with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
               return 0 for the sake of consistency, as reading a zero-sized
               bitfield is valid in Ada and the value is fully specified.  */
            if (bitsize == 0)
              return const0_rtx;

            op0 = validize_mem (op0);

            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
                                     (modifier == EXPAND_STACK_PARM
                                      ? NULL_RTX : target),
                                     ext_mode, ext_mode);

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  GET_MODE_BITSIZE (GET_MODE (op0))
                                  - bitsize, op0, 1);

            /* If the result type is BLKmode, store the data into a temporary
               of the appropriate type, but with the mode corresponding to the
               mode for the data we have (op0's mode).  */
            if (mode == BLKmode)
              {
                rtx new_rtx
                  = assign_stack_temp_for_type (ext_mode,
                                                GET_MODE_BITSIZE (ext_mode),
                                                type);
                emit_move_insn (new_rtx, op0);
                op0 = copy_rtx (new_rtx);
                PUT_MODE (op0, BLKmode);
              }

            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        /* If op0 is a temporary because of forcing to memory, pass only the
           type to set_mem_attributes so that the original expression is never
           marked as ADDRESSABLE through MEM_EXPR of the temporary.  */
        if (mem_attrs_from_type)
          set_mem_attributes (op0, type, 0);
        else
          set_mem_attributes (op0, exp, 0);

        if (REG_P (XEXP (op0, 0)))
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
         inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
        error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
        tree fndecl = get_callee_fndecl (exp), attr;

        if (fndecl
            && (attr = lookup_attribute ("error",
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
          error ("%Kcall to %qs declared with attribute error: %s",
                 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
                 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
        if (fndecl
            && (attr = lookup_attribute ("warning",
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %qs declared with attribute warning: %s",
                      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
                      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

        /* Check for a built-in function.  */
        if (fndecl && DECL_BUILT_IN (fndecl))
          {
            gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
            return expand_builtin (exp, target, subtarget, tmode, ignore);
          }
      }
      return expand_call (exp, target, ignore);

    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
         temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
          && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
          && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
          && handled_component_p (treeop0))
        {
          enum machine_mode mode1;
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          int unsignedp;
          int volatilep = 0;
          tree tem
            = get_inner_reference (treeop0, &bitsize, &bitpos,
                                   &offset, &mode1, &unsignedp, &volatilep,
                                   true);
          rtx orig_op0;

          /* ??? We should work harder and deal with non-zero offsets.  */
          if (!offset
              && (bitpos % BITS_PER_UNIT) == 0
              && bitsize >= 0
              && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
            {
              /* See the normal_inner_ref case for the rationale.  */
              orig_op0
                = expand_expr_real (tem,
                                    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                                     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                         != INTEGER_CST)
                                     && modifier != EXPAND_STACK_PARM
                                     ? target : NULL_RTX),
                                    VOIDmode,
                                    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
                                    NULL, true);

              if (MEM_P (orig_op0))
                {
                  op0 = orig_op0;

                  /* Get a reference to just this component.  */
                  if (modifier == EXPAND_CONST_ADDRESS
                      || modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
                  else
                    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

                  if (op0 == orig_op0)
                    op0 = copy_rtx (op0);

                  set_mem_attributes (op0, treeop0, 0);
                  if (REG_P (XEXP (op0, 0)))
                    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

                  MEM_VOLATILE_P (op0) |= volatilep;
                }
            }
        }

      if (!op0)
        op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
                                NULL, inner_reference_p);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
        ;
      /* If neither mode is BLKmode, and both modes are the same size
         then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
               && (GET_MODE_PRECISION (mode)
                   == GET_MODE_PRECISION (GET_MODE (op0)))
               && !COMPLEX_MODE_P (GET_MODE (op0)))
        {
          if (GET_CODE (op0) == SUBREG)
            op0 = force_reg (GET_MODE (op0), op0);
          temp = gen_lowpart_common (mode, op0);
          if (temp)
            op0 = temp;
          else
            {
              if (!REG_P (op0) && !MEM_P (op0))
                op0 = force_reg (GET_MODE (op0), op0);
              op0 = gen_lowpart (mode, op0);
            }
        }
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
        op0 = convert_modes (mode, GET_MODE (op0), op0,
                             TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
        return extract_bit_field (op0, TYPE_PRECISION (type), 0,
                                  TYPE_UNSIGNED (type), NULL_RTX,
                                  mode, mode);
      /* As a last resort, spill op0 to memory, and reload it in a
         different mode.  */
      else if (!MEM_P (op0))
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (treeop0);

          gcc_assert (!TREE_ADDRESSABLE (exp));

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
         output type is such that the operand is known to be aligned, indicate
         that it is.  Otherwise, we need only be concerned about alignment for
         non-BLKmode results.  */
      if (MEM_P (op0))
        {
          enum insn_code icode;

          if (TYPE_ALIGN_OK (type))
            {
              /* ??? Copying the MEM without substantially changing it might
                 run afoul of the code handling volatile memory references in
                 store_expr, which assumes that TARGET is returned unmodified
                 if it has been used.  */
              op0 = copy_rtx (op0);
              set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
            }
          else if (modifier != EXPAND_WRITE
                   && modifier != EXPAND_MEMORY
                   && !inner_reference_p
                   && mode != BLKmode
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
            {
              /* If the target does have special handling for unaligned
                 loads of this mode, use it.  */
              if ((icode = optab_handler (movmisalign_optab, mode))
                  != CODE_FOR_nothing)
                {
                  rtx reg, insn;

                  op0 = adjust_address (op0, mode, 0);
                  /* We've already validated the memory, and we're creating a
                     new pseudo destination.  The predicates really can't
                     fail.  */
                  reg = gen_reg_rtx (mode);

                  /* Nor can the insn generator.  */
                  insn = GEN_FCN (icode) (reg, op0);
                  emit_insn (insn);
                  return reg;
                }
              else if (STRICT_ALIGNMENT)
                {
                  tree inner_type = TREE_TYPE (treeop0);
                  HOST_WIDE_INT temp_size
                    = MAX (int_size_in_bytes (inner_type),
                           (HOST_WIDE_INT) GET_MODE_SIZE (mode));
                  rtx new_rtx
                    = assign_stack_temp_for_type (mode, temp_size, type);
                  rtx new_with_op0_mode
                    = adjust_address (new_rtx, GET_MODE (op0), 0);

                  gcc_assert (!TREE_ADDRESSABLE (exp));

                  if (GET_MODE (op0) == BLKmode)
                    emit_block_move (new_with_op0_mode, op0,
                                     GEN_INT (GET_MODE_SIZE (mode)),
                                     (modifier == EXPAND_STACK_PARM
                                      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
                  else
                    emit_move_insn (new_with_op0_mode, op0);

                  op0 = new_rtx;
                }
            }

          op0 = adjust_address (op0, mode, 0);
        }

      return op0;

    case MODIFY_EXPR:
      {
        tree lhs = treeop0;
        tree rhs = treeop1;
        gcc_assert (ignore);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx_code_label *label = gen_label_rtx ();
            int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
            do_jump (TREE_OPERAND (rhs, 1),
                     value ? label : 0,
                     value ? 0 : label, -1);
            expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
                               false);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        expand_assignment (lhs, rhs, false);
        return const0_rtx;
      }

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);

    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
         initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
         have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
                               modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}

/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      enum machine_mode mode = GET_MODE (exp);
      rtx mask = immed_wide_int_const
        (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
                          exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
                           exp, count, target, 0);
    }
}
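
/* Worked example (illustration only): for a 5-bit bit-field type in a 32-bit
   mode, the unsigned path above computes EXP & 0x1f, while the signed path
   computes (EXP << 27) >> 27 with an arithmetic right shift, reproducing the
   low five bits of EXP with sign extension.  */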

/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
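
/* Illustration only: the shape recognized above is the usual round-up
   adjustment OFFSET = (-(sizetype) &EXP) & (ALIGN - 1), where ALIGN is a
   power of two larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such
   an OFFSET to the address of EXP yields an address aligned to ALIGN.  */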

/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
        {
          *ptr_offset = size_zero_node;
          return TREE_OPERAND (arg, 0);
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
        {
          array = TREE_OPERAND (arg, 0);
          offset = size_zero_node;
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
            return 0;

          /* Check if the array has a nonzero lower bound.  */
          lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
          if (!integer_zerop (lower_bound))
            {
              /* If the offset and base aren't both constants, return 0.  */
              if (TREE_CODE (lower_bound) != INTEGER_CST)
                return 0;
              if (TREE_CODE (offset) != INTEGER_CST)
                return 0;
              /* Adjust offset by the lower bound.  */
              offset = size_diffop (fold_convert (sizetype, offset),
                                    fold_convert (sizetype, lower_bound));
            }
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != ADDR_EXPR)
            return 0;
          array = TREE_OPERAND (array, 0);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
            return 0;
        }
      else
        return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg0, 0);
          offset = arg1;
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg1, 0);
          offset = arg0;
        }
      else
        return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
           || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
          || !init
          || TREE_CODE (init) != STRING_CST)
        return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
          || (length = TREE_STRING_LENGTH (init)) <= 0
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
        return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
         and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
          && (! tree_fits_uhwi_p (offset)
              || compare_tree_int (offset, length) >= 0))
        return 0;

      *ptr_offset = offset;
      return array;
    }

  return 0;
}
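
/* Usage sketch (illustration only): for ARG equal to the folded form of
   "hello" + 2, i.e. a POINTER_PLUS_EXPR of an ADDR_EXPR of the STRING_CST
   "hello" and the constant 2, the function returns the STRING_CST and sets
   *PTR_OFFSET to (sizetype) 2.  */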

/* Generate code to calculate OPS, an exploded comparison expression,
   using a store-flag instruction, and return an rtx for the result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);

      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
          && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
        {
          enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
          tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
          tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
                                       gimple_assign_rhs1 (srcstmt),
                                       gimple_assign_rhs2 (srcstmt));
          temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
          if (temp)
            return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
        }
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
                                operand_mode, unsignedp,
                                (TYPE_PRECISION (ops->type) == 1
                                 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
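
/* Worked example (illustration only): for a comparison (x & 8) != 0, the
   single-bit path above hands the BIT_AND_EXPR to fold_single_bit_test,
   which yields the equivalent of (x >> 3) & 1; for the EQ variant the
   result is additionally XORed with 1, so no scc instruction is needed.  */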

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label, rtx default_label, rtx fallback_label,
            int default_probability)
{
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                 omode, 1, default_label,
                                 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_mode (index_mode, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
                                  ? default_label
                                  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
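
/* Worked example (illustration only): for a switch whose cases span 10..14,
   the caller passes MINVAL = 10 and RANGE = 4; the casesi pattern receives
   the index, the lower bound and the range, dispatches through TABLE_LABEL
   when (index - 10) <= 4 unsigned, and otherwise jumps to the default (or
   fallback) label.  */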

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
                               gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
                                             Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
                               gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
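
/* Worked example (illustration only): with 4-byte CASE_VECTOR_MODE entries
   and INDEX equal to 3, the code above forms the address TABLE_LABEL + 3 * 4,
   loads the vector entry found at that address into a register, and emits
   the tablejump through it.  */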

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}
11230 const_vector_from_tree (tree exp
)
11236 enum machine_mode inner
, mode
;
11238 mode
= TYPE_MODE (TREE_TYPE (exp
));
11240 if (initializer_zerop (exp
))
11241 return CONST0_RTX (mode
);
11243 units
= GET_MODE_NUNITS (mode
);
11244 inner
= GET_MODE_INNER (mode
);
11246 v
= rtvec_alloc (units
);
11248 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
11250 elt
= VECTOR_CST_ELT (exp
, i
);
11252 if (TREE_CODE (elt
) == REAL_CST
)
11253 RTVEC_ELT (v
, i
) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt
),
11255 else if (TREE_CODE (elt
) == FIXED_CST
)
11256 RTVEC_ELT (v
, i
) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt
),
11259 RTVEC_ELT (v
, i
) = immed_wide_int_const (elt
, inner
);
11262 return gen_rtx_CONST_VECTOR (mode
, v
);
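
/* Illustration only: a VECTOR_CST such as { 1, 2, 3, 4 } of a V4SImode type
   becomes (const_vector:V4SI [1 2 3 4]), each element converted with
   immed_wide_int_const (or the REAL_CST/FIXED_CST variants above).  */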

/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
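
/* For example, for LANG "gxx" this builds "__gxx_personality_v0" when
   DWARF-2 unwinding is in use and "__gxx_personality_sj0" for
   setjmp/longjmp unwinding.  */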

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"