1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
31 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
43 #include "typeclass.h"
45 #include "langhooks.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
54 #include "diagnostic.h"
55 #include "ssaexpand.h"
56 #include "target-globals.h"
58 /* Decide whether a function's arguments should be processed
59 from first to last or from last to first.
61 They should if the stack and args grow in opposite directions, but
62 only if we have push insns. */
66 #ifndef PUSH_ARGS_REVERSED
67 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
68 #define PUSH_ARGS_REVERSED /* If it's last to first. */
74 #ifndef STACK_PUSH_CODE
75 #ifdef STACK_GROWS_DOWNWARD
76 #define STACK_PUSH_CODE PRE_DEC
78 #define STACK_PUSH_CODE PRE_INC
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
91 /* This structure is used by move_by_pieces to describe the move to
93 struct move_by_pieces_d
102 int explicit_inc_from;
103 unsigned HOST_WIDE_INT len;
104 HOST_WIDE_INT offset;
108 /* This structure is used by store_by_pieces to describe the clear to
111 struct store_by_pieces_d
117 unsigned HOST_WIDE_INT len;
118 HOST_WIDE_INT offset;
119 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
127 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces_d *);
129 static bool block_move_libcall_safe_for_call_parm (void);
130 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces_d *);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, alias_set_type);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, alias_set_type, bool);
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150 static int is_aligning_offset (const_tree, const_tree);
151 static void expand_operands (tree, tree, rtx, rtx *, rtx *,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
162 /* This macro is used to determine whether move_by_pieces should be called
163 to perform a structure copy. */
164 #ifndef MOVE_BY_PIECES_P
165 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
166 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
167 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
170 /* This macro is used to determine whether clear_by_pieces should be
171 called to clear storage. */
172 #ifndef CLEAR_BY_PIECES_P
173 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
174 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
175 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
178 /* This macro is used to determine whether store_by_pieces should be
179 called to "memset" storage with byte values other than zero. */
180 #ifndef SET_BY_PIECES_P
181 #define SET_BY_PIECES_P(SIZE, ALIGN) \
182 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
183 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
186 /* This macro is used to determine whether store_by_pieces should be
187 called to "memcpy" storage when the source is a constant string. */
188 #ifndef STORE_BY_PIECES_P
189 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
190 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
191 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
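/* Illustrative sketch (not from GCC): how the *_BY_PIECES_P predicates
   above are typically consulted.  Before committing to a piecewise
   expansion, a block operation asks whether the estimated insn count
   beats the target's ratio; the hypothetical helper below only shows
   the shape of that decision.  */
#if 0
static bool
want_inline_block_copy (unsigned HOST_WIDE_INT size, unsigned int align)
{
  /* True when an inline multi-insn copy is judged cheaper than a
     memcpy libcall for this SIZE/ALIGN pair.  */
  return MOVE_BY_PIECES_P (size, align);
}
#endif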
194 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
196 #ifndef SLOW_UNALIGNED_ACCESS
197 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
200 /* This is run to set up which modes can be used
201 directly in memory and to initialize the block move optab. It is run
202 at the beginning of compilation and when the target is reinitialized. */
205 init_expr_target (void)
208 enum machine_mode mode;
213 /* Try indexing by frame ptr and try by stack ptr.
214 It is known that on the Convex the stack ptr isn't a valid index.
215 With luck, one or the other is valid on any machine. */
216 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
217 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
219 /* A scratch register we can modify in-place below to avoid
220 useless RTL allocations. */
221 reg = gen_rtx_REG (VOIDmode, -1);
223 insn = rtx_alloc (INSN);
224 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
225 PATTERN (insn) = pat;
227 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
228 mode = (enum machine_mode) ((int) mode + 1))
232 direct_load[(int) mode] = direct_store[(int) mode] = 0;
233 PUT_MODE (mem, mode);
234 PUT_MODE (mem1, mode);
235 PUT_MODE (reg, mode);
237 /* See if there is some register that can be used in this mode and
238 directly loaded or stored from memory. */
240 if (mode != VOIDmode && mode != BLKmode)
241 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
242 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
245 if (! HARD_REGNO_MODE_OK (regno, mode))
248 SET_REGNO (reg, regno);
251 SET_DEST (pat) = reg;
252 if (recog (pat, insn, &num_clobbers) >= 0)
253 direct_load[(int) mode] = 1;
255 SET_SRC (pat) = mem1;
256 SET_DEST (pat) = reg;
257 if (recog (pat, insn, &num_clobbers) >= 0)
258 direct_load[(int) mode] = 1;
261 SET_DEST (pat) = mem;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_store[(int) mode] = 1;
266 SET_DEST (pat) = mem1;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_store[(int) mode] = 1;
272 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
274 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
275 mode = GET_MODE_WIDER_MODE (mode))
277 enum machine_mode srcmode;
278 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
279 srcmode = GET_MODE_WIDER_MODE (srcmode))
283 ic = can_extend_p (mode, srcmode, 0);
284 if (ic == CODE_FOR_nothing)
287 PUT_MODE (mem, srcmode);
289 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
290 float_extend_from_mem[mode][srcmode] = true;
295 /* This is run at the start of compiling a function. */
300 memset (&crtl->expr, 0, sizeof (crtl->expr));
303 /* Copy data from FROM to TO, where the machine modes are not the same.
304 Both modes may be integer, or both may be floating, or both may be
306 UNSIGNEDP should be nonzero if FROM is an unsigned type.
307 This causes zero-extension instead of sign-extension. */
310 convert_move (rtx to, rtx from, int unsignedp)
312 enum machine_mode to_mode = GET_MODE (to);
313 enum machine_mode from_mode = GET_MODE (from);
314 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
315 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
319 /* rtx code for making an equivalent value. */
320 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
321 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
324 gcc_assert (to_real == from_real);
325 gcc_assert (to_mode != BLKmode);
326 gcc_assert (from_mode != BLKmode);
328 /* If the source and destination are already the same, then there's
333 /* If FROM is a SUBREG that indicates that we have already done at least
334 the required extension, strip it. We don't handle such SUBREGs as
337 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
338 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
339 >= GET_MODE_SIZE (to_mode))
340 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
341 from = gen_lowpart (to_mode, from), from_mode = to_mode;
343 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
345 if (to_mode == from_mode
346 || (from_mode == VOIDmode && CONSTANT_P (from)))
348 emit_move_insn (to, from);
352 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
354 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
356 if (VECTOR_MODE_P (to_mode))
357 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
359 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
361 emit_move_insn (to, from);
365 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
367 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
368 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
377 gcc_assert ((GET_MODE_PRECISION (from_mode)
378 != GET_MODE_PRECISION (to_mode))
379 || (DECIMAL_FLOAT_MODE_P (from_mode)
380 != DECIMAL_FLOAT_MODE_P (to_mode)));
382 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
383 /* Conversion between decimal float and binary float, same size. */
384 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
385 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
390 /* Try converting directly if the insn is supported. */
392 code = convert_optab_handler (tab, to_mode, from_mode);
393 if (code != CODE_FOR_nothing)
395 emit_unop_insn (code, to, from,
396 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
400 /* Otherwise use a libcall. */
401 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
403 /* Is this conversion implemented yet? */
404 gcc_assert (libcall);
407 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
409 insns = get_insns ();
411 emit_libcall_block (insns, to, value,
412 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
414 : gen_rtx_FLOAT_EXTEND (to_mode, from));
418 /* Handle pointer conversion. */ /* SPEE 900220. */
419 /* Targets are expected to provide conversion insns between PxImode and
420 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
421 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
423 enum machine_mode full_mode
424 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
426 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
427 != CODE_FOR_nothing);
429 if (full_mode != from_mode)
430 from = convert_to_mode (full_mode, from, unsignedp);
431 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
435 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
438 enum machine_mode full_mode
439 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
441 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
442 != CODE_FOR_nothing);
444 if (to_mode == full_mode)
446 emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
452 new_from = gen_reg_rtx (full_mode);
453 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
454 new_from, from, UNKNOWN);
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
461 /* Make sure both are fixed-point modes or both are not. */
462 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
463 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
464 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
466 /* If we widen from_mode to to_mode and they are in the same class,
467 we won't saturate the result.
468 Otherwise, always saturate the result to play safe. */
469 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
470 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
471 expand_fixed_convert (to, from, 0, 0);
473 expand_fixed_convert (to, from, 0, 1);
477 /* Now both modes are integers. */
479 /* Handle expanding beyond a word. */
480 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
481 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
488 enum machine_mode lowpart_mode;
489 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
491 /* Try converting directly if the insn is supported. */
492 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
495 /* If FROM is a SUBREG, put it into a register. Do this
496 so that we always generate the same set of insns for
497 better cse'ing; if an intermediate assignment occurred,
498 we won't be doing the operation directly on the SUBREG. */
499 if (optimize > 0 && GET_CODE (from) == SUBREG)
500 from = force_reg (from_mode, from);
501 emit_unop_insn (code, to, from, equiv_code);
504 /* Next, try converting via full word. */
505 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
506 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
507 != CODE_FOR_nothing))
509 rtx word_to = gen_reg_rtx (word_mode);
512 if (reg_overlap_mentioned_p (to, from))
513 from = force_reg (from_mode, from);
516 convert_move (word_to, from, unsignedp);
517 emit_unop_insn (code, to, word_to, equiv_code);
521 /* No special multiword conversion insn; do it by hand. */
524 /* Since we will turn this into a no conflict block, we must ensure
525 that the source does not overlap the target. */
527 if (reg_overlap_mentioned_p (to, from))
528 from = force_reg (from_mode, from);
530 /* Get a copy of FROM widened to a word, if necessary. */
531 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
532 lowpart_mode = word_mode;
534 lowpart_mode = from_mode;
536 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
538 lowpart = gen_lowpart (lowpart_mode, to);
539 emit_move_insn (lowpart, lowfrom);
541 /* Compute the value to put in each remaining word. */
543 fill_value = const0_rtx;
545 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
546 LT, lowfrom, const0_rtx,
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
555 gcc_assert (subword);
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
561 insns = get_insns ();
568 /* Truncating multi-word to a word or less. */
569 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
570 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
573 && ! MEM_VOLATILE_P (from)
574 && direct_load[(int) to_mode]
575 && ! mode_dependent_address_p (XEXP (from, 0)))
577 || GET_CODE (from) == SUBREG))
578 from = force_reg (from_mode, from);
579 convert_move (to, gen_lowpart (word_mode, from), 0);
583 /* Now follow all the conversions between integers
584 no more than a word long. */
586 /* For truncation, usually we can just refer to FROM in a narrower mode. */
587 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
588 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
589 GET_MODE_BITSIZE (from_mode)))
592 && ! MEM_VOLATILE_P (from)
593 && direct_load[(int) to_mode]
594 && ! mode_dependent_address_p (XEXP (from, 0)))
596 || GET_CODE (from) == SUBREG))
597 from = force_reg (from_mode, from);
598 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
599 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
600 from = copy_to_reg (from);
601 emit_move_insn (to, gen_lowpart (to_mode, from));
605 /* Handle extension. */
606 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
608 /* Convert directly if that works. */
609 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
612 emit_unop_insn (code, to, from, equiv_code);
617 enum machine_mode intermediate;
621 /* Search for a mode to convert via. */
622 for (intermediate = from_mode; intermediate != VOIDmode;
623 intermediate = GET_MODE_WIDER_MODE (intermediate))
624 if (((can_extend_p (to_mode, intermediate, unsignedp)
626 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (intermediate))))
629 && (can_extend_p (intermediate, from_mode, unsignedp)
630 != CODE_FOR_nothing))
632 convert_move (to, convert_to_mode (intermediate, from,
633 unsignedp), unsignedp);
637 /* No suitable intermediate mode.
638 Generate what we need with shifts. */
639 shift_amount = build_int_cst (NULL_TREE,
640 GET_MODE_BITSIZE (to_mode)
641 - GET_MODE_BITSIZE (from_mode));
642 from = gen_lowpart (to_mode, force_reg (from_mode, from));
643 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
645 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
648 emit_move_insn (to, tmp);
653 /* Support special truncate insns for certain modes. */
654 if (convert_optab_handler (trunc_optab, to_mode,
655 from_mode) != CODE_FOR_nothing)
657 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
662 /* Handle truncation of volatile memrefs, and so on;
663 the things that couldn't be truncated directly,
664 and for which there was no special instruction.
666 ??? Code above formerly short-circuited this, for most integer
667 mode pairs, with a force_reg in from_mode followed by a recursive
668 call to this routine. Appears always to have been wrong. */
669 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
671 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
672 emit_move_insn (to, temp);
676 /* Mode combination is not recognized. */
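/* Usage sketch (illustrative; the register variables are hypothetical):
   convert_move is the workhorse behind convert_to_mode.  Given TO and
   FROM already in their final modes, it emits whatever extension,
   truncation or libcall sequence the target requires.  */
#if 0
  convert_move (si_reg, qi_reg, 0);   /* sign-extend QImode into SImode */
  convert_move (si_reg, qi_reg, 1);   /* zero-extend QImode into SImode */
#endif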
680 /* Return an rtx for a value that would result
681 from converting X to mode MODE.
682 Both X and MODE may be floating, or both integer.
683 UNSIGNEDP is nonzero if X is an unsigned value.
684 This can be done by referring to a part of X in place
685 or by copying to a new temporary with conversion. */
688 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
690 return convert_modes (mode, VOIDmode, x, unsignedp);
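/* Usage sketch (illustrative): widening a byte value to a full word.
   convert_to_mode allocates the result itself; the names below are
   hypothetical.  */
#if 0
  rtx byte_val = gen_reg_rtx (QImode);
  /* ... byte_val gets set ... */
  rtx word_val = convert_to_mode (SImode, byte_val, 1 /* unsignedp */);
#endif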
693 /* Return an rtx for a value that would result
694 from converting X from mode OLDMODE to mode MODE.
695 Both modes may be floating, or both integer.
696 UNSIGNEDP is nonzero if X is an unsigned value.
698 This can be done by referring to a part of X in place
699 or by copying to a new temporary with conversion.
701 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
704 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
708 /* If FROM is a SUBREG that indicates that we have already done at least
709 the required extension, strip it. */
711 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
712 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
713 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
714 x = gen_lowpart (mode, x);
716 if (GET_MODE (x) != VOIDmode)
717 oldmode = GET_MODE (x);
722 /* There is one case that we must handle specially: If we are converting
723 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
724 we are to interpret the constant as unsigned, gen_lowpart will do
725 the wrong if the constant appears negative. What we want to do is
726 make the high-order word of the constant zero, not all ones. */
728 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
729 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
730 && CONST_INT_P (x) && INTVAL (x) < 0)
732 double_int val = uhwi_to_double_int (INTVAL (x));
734 /* We need to zero extend VAL. */
735 if (oldmode != VOIDmode)
736 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
738 return immed_double_int_const (val, mode);
741 /* We can do this with a gen_lowpart if both desired and current modes
742 are integer, and this is either a constant integer, a register, or a
743 non-volatile MEM. Except for the constant case where MODE is no
744 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
747 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
748 || (GET_MODE_CLASS (mode) == MODE_INT
749 && GET_MODE_CLASS (oldmode) == MODE_INT
750 && (GET_CODE (x) == CONST_DOUBLE
751 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
752 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
753 && direct_load[(int) mode])
755 && (! HARD_REGISTER_P (x)
756 || HARD_REGNO_MODE_OK (REGNO (x), mode))
757 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
758 GET_MODE_BITSIZE (GET_MODE (x)))))))))
760 /* ?? If we don't know OLDMODE, we have to assume here that
761 X does not need sign- or zero-extension. This may not be
762 the case, but it's the best we can do. */
763 if (CONST_INT_P (x) && oldmode != VOIDmode
764 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
766 HOST_WIDE_INT val = INTVAL (x);
767 int width = GET_MODE_BITSIZE (oldmode);
769 /* We must sign or zero-extend in this case. Start by
770 zero-extending, then sign extend if we need to. */
771 val &= ((HOST_WIDE_INT) 1 << width) - 1;
773 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
774 val |= (HOST_WIDE_INT) (-1) << width;
776 return gen_int_mode (val, mode);
779 return gen_lowpart (mode, x);
782 /* Converting from integer constant into mode is always equivalent to an
784 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
786 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
787 return simplify_gen_subreg (mode, x, oldmode, 0);
790 temp = gen_reg_rtx (mode);
791 convert_move (temp, x, unsignedp);
795 /* STORE_MAX_PIECES is the number of bytes at a time that we can
796 store efficiently. Due to internal GCC limitations, this is
797 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
798 for an immediate constant. */
800 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
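/* Worked example (assumptions: 64-bit HOST_WIDE_INT, a target with
   MOVE_MAX_PIECES == 8): STORE_MAX_PIECES is MIN (8, 2 * 8) == 8, so
   stores of immediate constants may be emitted up to 8 bytes at a
   time.  */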
802 /* Determine whether the LEN bytes can be moved by using several move
803 instructions. Return nonzero if a call to move_by_pieces should
807 can_move_by_pieces (unsigned HOST_WIDE_INT len,
808 unsigned int align ATTRIBUTE_UNUSED)
810 return MOVE_BY_PIECES_P (len, align);
813 /* Generate several move instructions to copy LEN bytes from block FROM to
814 block TO. (These are MEM rtx's with BLKmode).
816 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
817 used to push FROM to the stack.
819 ALIGN is maximum stack alignment we can assume.
821 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
822 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
826 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
827 unsigned int align, int endp)
829 struct move_by_pieces_d data;
830 enum machine_mode to_addr_mode, from_addr_mode
831 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
832 rtx to_addr, from_addr = XEXP (from, 0);
833 unsigned int max_size = MOVE_MAX_PIECES + 1;
834 enum machine_mode mode = VOIDmode, tmode;
835 enum insn_code icode;
837 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
840 data.from_addr = from_addr;
843 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
844 to_addr = XEXP (to, 0);
847 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
848 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
850 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
854 to_addr_mode = VOIDmode;
858 #ifdef STACK_GROWS_DOWNWARD
864 data.to_addr = to_addr;
867 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
868 || GET_CODE (from_addr) == POST_INC
869 || GET_CODE (from_addr) == POST_DEC);
871 data.explicit_inc_from = 0;
872 data.explicit_inc_to = 0;
873 if (data.reverse) data.offset = len;
876 /* If copying requires more than two move insns,
877 copy addresses to registers (to make displacements shorter)
878 and use post-increment if available. */
879 if (!(data
.autinc_from
&& data
.autinc_to
)
880 && move_by_pieces_ninsns (len
, align
, max_size
) > 2)
882 /* Find the mode of the largest move... */
883 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
884 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
885 if (GET_MODE_SIZE (tmode
) < max_size
)
888 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
890 data
.from_addr
= copy_to_mode_reg (from_addr_mode
,
891 plus_constant (from_addr
, len
));
892 data
.autinc_from
= 1;
893 data
.explicit_inc_from
= -1;
895 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
897 data
.from_addr
= copy_to_mode_reg (from_addr_mode
, from_addr
);
898 data
.autinc_from
= 1;
899 data
.explicit_inc_from
= 1;
901 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
902 data
.from_addr
= copy_to_mode_reg (from_addr_mode
, from_addr
);
903 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
905 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
906 plus_constant (to_addr
, len
));
908 data
.explicit_inc_to
= -1;
910 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
912 data
.to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
914 data
.explicit_inc_to
= 1;
916 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
917 data
.to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
920 tmode
= mode_for_size (MOVE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
921 if (align
>= GET_MODE_ALIGNMENT (tmode
))
922 align
= GET_MODE_ALIGNMENT (tmode
);
925 enum machine_mode xmode
;
927 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
929 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
930 if (GET_MODE_SIZE (tmode
) > MOVE_MAX_PIECES
931 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
934 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
937 /* First move what we can in the largest integer mode, then go to
938 successively smaller modes. */
942 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
943 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
944 if (GET_MODE_SIZE (tmode) < max_size)
947 if (mode == VOIDmode)
950 icode = optab_handler (mov_optab, mode);
951 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
952 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
954 max_size = GET_MODE_SIZE (mode);
957 /* The code above should have handled everything. */
958 gcc_assert (!data.len);
964 gcc_assert (!data.reverse);
969 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
970 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
972 data.to_addr = copy_to_mode_reg (to_addr_mode,
973 plus_constant (data.to_addr,
976 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
983 to1 = adjust_address (data.to, QImode, data.offset);
991 /* Return number of insns required to move L bytes by pieces.
992 ALIGN (in bits) is maximum alignment we can assume. */
994 static unsigned HOST_WIDE_INT
995 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
996 unsigned int max_size)
998 unsigned HOST_WIDE_INT n_insns = 0;
999 enum machine_mode tmode;
1001 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1002 if (align >= GET_MODE_ALIGNMENT (tmode))
1003 align = GET_MODE_ALIGNMENT (tmode);
1006 enum machine_mode tmode, xmode;
1008 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1010 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1011 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1012 || SLOW_UNALIGNED_ACCESS (tmode, align))
1015 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1018 while (max_size > 1)
1020 enum machine_mode mode = VOIDmode;
1021 enum insn_code icode;
1023 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1024 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1025 if (GET_MODE_SIZE (tmode) < max_size)
1028 if (mode == VOIDmode)
1031 icode = optab_handler (mov_optab, mode);
1032 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1033 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1035 max_size = GET_MODE_SIZE (mode);
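/* Worked example (assuming MOVE_MAX_PIECES == 4 and every integer mode
   usable at the given alignment): for l == 7 the loop above counts one
   SImode move (4 bytes), one HImode move (2 bytes) and one QImode move
   (1 byte), so move_by_pieces_ninsns returns 3.  */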
1042 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1043 with move instructions for mode MODE. GENFUN is the gen_... function
1044 to make a move insn for that mode. DATA has all the other info. */
1047 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1048 struct move_by_pieces_d *data)
1050 unsigned int size = GET_MODE_SIZE (mode);
1051 rtx to1 = NULL_RTX, from1;
1053 while (data->len >= size)
1056 data->offset -= size;
1060 if (data->autinc_to)
1061 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1064 to1 = adjust_address (data->to, mode, data->offset);
1067 if (data->autinc_from)
1068 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1071 from1 = adjust_address (data->from, mode, data->offset);
1073 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1074 emit_insn (gen_add2_insn (data->to_addr,
1075 GEN_INT (-(HOST_WIDE_INT)size)));
1076 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1077 emit_insn (gen_add2_insn (data->from_addr,
1078 GEN_INT (-(HOST_WIDE_INT)size)));
1081 emit_insn ((*genfun) (to1, from1));
1084 #ifdef PUSH_ROUNDING
1085 emit_single_push_insn (mode, from1, NULL);
1091 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1092 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1093 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1094 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1096 if (! data->reverse)
1097 data->offset += size;
1103 /* Emit code to move a block Y to a block X. This may be done with
1104 string-move instructions, with multiple scalar move instructions,
1105 or with a library call.
1107 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1108 SIZE is an rtx that says how long they are.
1109 ALIGN is the maximum alignment we can assume they have.
1110 METHOD describes what kind of copy this is, and what mechanisms may be used.
1112 Return the address of the new block, if memcpy is called and returns it,
1116 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1117 unsigned int expected_align, HOST_WIDE_INT expected_size)
1124 if (CONST_INT_P (size)
1125 && INTVAL (size) == 0)
1130 case BLOCK_OP_NORMAL:
1131 case BLOCK_OP_TAILCALL:
1132 may_use_call = true;
1135 case BLOCK_OP_CALL_PARM:
1136 may_use_call = block_move_libcall_safe_for_call_parm ();
1138 /* Make inhibit_defer_pop nonzero around the library call
1139 to force it to pop the arguments right away. */
1143 case BLOCK_OP_NO_LIBCALL:
1144 may_use_call = false;
1151 gcc_assert (MEM_P (x) && MEM_P (y));
1152 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1153 gcc_assert (align >= BITS_PER_UNIT);
1155 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1156 block copy is more efficient for other large modes, e.g. DCmode. */
1157 x = adjust_address (x, BLKmode, 0);
1158 y = adjust_address (y, BLKmode, 0);
1160 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1161 can be incorrect is coming from __builtin_memcpy. */
1162 if (CONST_INT_P (size))
1164 x = shallow_copy_rtx (x);
1165 y = shallow_copy_rtx (y);
1166 set_mem_size (x, size);
1167 set_mem_size (y, size);
1170 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1171 move_by_pieces (x, y, INTVAL (size), align, 0);
1172 else if (emit_block_move_via_movmem (x, y, size, align,
1173 expected_align, expected_size))
1175 else if (may_use_call
1176 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1177 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1178 retval = emit_block_move_via_libcall (x, y, size,
1179 method == BLOCK_OP_TAILCALL);
1181 emit_block_move_via_loop (x, y, size, align);
1183 if (method == BLOCK_OP_CALL_PARM)
1190 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1192 return emit_block_move_hints (x, y, size, method, 0, -1);
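/* Usage sketch (illustrative): a caller holding two BLKmode MEMs X and
   Y of NBYTES bytes would typically emit the copy as

     emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   and let emit_block_move_hints choose between move_by_pieces, a
   movmem pattern, a memcpy libcall, or the fallback loop.  */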
1195 /* A subroutine of emit_block_move. Returns true if calling the
1196 block move libcall will not clobber any parameters which may have
1197 already been placed on the stack. */
1200 block_move_libcall_safe_for_call_parm (void)
1202 #if defined (REG_PARM_STACK_SPACE)
1206 /* If arguments are pushed on the stack, then they're safe. */
1210 /* If registers go on the stack anyway, any argument is sure to clobber
1211 an outgoing argument. */
1212 #if defined (REG_PARM_STACK_SPACE)
1213 fn = emit_block_move_libcall_fn (false);
1214 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1215 depend on its argument. */
1217 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1218 && REG_PARM_STACK_SPACE (fn) != 0)
1222 /* If any argument goes in memory, then it might clobber an outgoing
1225 CUMULATIVE_ARGS args_so_far;
1228 fn = emit_block_move_libcall_fn (false);
1229 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1231 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1232 for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
1234 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1235 rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
1237 if (!tmp || !REG_P (tmp))
1239 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1241 targetm.calls.function_arg_advance (&args_so_far, mode,
1248 /* A subroutine of emit_block_move. Expand a movmem pattern;
1249 return true if successful. */
1252 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1253 unsigned int expected_align, HOST_WIDE_INT expected_size)
1255 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1256 int save_volatile_ok = volatile_ok;
1257 enum machine_mode mode;
1259 if (expected_align < align)
1260 expected_align = align;
1262 /* Since this is a move insn, we don't care about volatility. */
1265 /* Try the most limited insn first, because there's no point
1266 including more than one in the machine description unless
1267 the more limited one has some advantage. */
1269 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1270 mode = GET_MODE_WIDER_MODE (mode))
1272 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1273 insn_operand_predicate_fn pred;
1275 if (code != CODE_FOR_nothing
1276 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1277 here because if SIZE is less than the mode mask, as it is
1278 returned by the macro, it will definitely be less than the
1279 actual mode mask. */
1280 && ((CONST_INT_P (size)
1281 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1282 <= (GET_MODE_MASK (mode) >> 1)))
1283 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1284 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1285 || (*pred) (x, BLKmode))
1286 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1287 || (*pred) (y, BLKmode))
1288 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1289 || (*pred) (opalign, VOIDmode)))
1292 rtx last = get_last_insn ();
1295 op2 = convert_to_mode (mode, size, 1);
1296 pred = insn_data[(int) code].operand[2].predicate;
1297 if (pred != 0 && ! (*pred) (op2, mode))
1298 op2 = copy_to_mode_reg (mode, op2);
1300 /* ??? When called via emit_block_move_for_call, it'd be
1301 nice if there were some way to inform the backend, so
1302 that it doesn't fail the expansion because it thinks
1303 emitting the libcall would be more efficient. */
1305 if (insn_data[(int) code].n_operands == 4)
1306 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1308 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1309 GEN_INT (expected_align
1311 GEN_INT (expected_size));
1315 volatile_ok = save_volatile_ok;
1319 delete_insns_since (last);
1323 volatile_ok = save_volatile_ok;
1327 /* A subroutine of emit_block_move. Expand a call to memcpy.
1328 Return the return value from memcpy, 0 otherwise. */
1331 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1333 rtx dst_addr, src_addr;
1334 tree call_expr, fn, src_tree, dst_tree, size_tree;
1335 enum machine_mode size_mode;
1338 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1339 pseudos. We can then place those new pseudos into a VAR_DECL and
1342 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1343 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1345 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1346 src_addr = convert_memory_address (ptr_mode, src_addr);
1348 dst_tree = make_tree (ptr_type_node, dst_addr);
1349 src_tree = make_tree (ptr_type_node, src_addr);
1351 size_mode = TYPE_MODE (sizetype);
1353 size = convert_to_mode (size_mode, size, 1);
1354 size = copy_to_mode_reg (size_mode, size);
1356 /* It is incorrect to use the libcall calling conventions to call
1357 memcpy in this context. This could be a user call to memcpy and
1358 the user may wish to examine the return value from memcpy. For
1359 targets where libcalls and normal calls have different conventions
1360 for returning pointers, we could end up generating incorrect code. */
1362 size_tree = make_tree (sizetype, size);
1364 fn = emit_block_move_libcall_fn (true);
1365 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1366 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1368 retval = expand_normal (call_expr);
1373 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1374 for the function we use for block copies. The first time FOR_CALL
1375 is true, we call assemble_external. */
1377 static GTY(()) tree block_move_fn;
1380 init_block_move_fn (const char *asmspec)
1386 fn = get_identifier ("memcpy");
1387 args = build_function_type_list (ptr_type_node, ptr_type_node,
1388 const_ptr_type_node, sizetype,
1391 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1392 DECL_EXTERNAL (fn) = 1;
1393 TREE_PUBLIC (fn) = 1;
1394 DECL_ARTIFICIAL (fn) = 1;
1395 TREE_NOTHROW (fn) = 1;
1396 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1397 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1403 set_user_assembler_name (block_move_fn, asmspec);
1407 emit_block_move_libcall_fn (int for_call)
1409 static bool emitted_extern;
1412 init_block_move_fn (NULL);
1414 if (for_call && !emitted_extern)
1416 emitted_extern = true;
1417 make_decl_rtl (block_move_fn);
1418 assemble_external (block_move_fn);
1421 return block_move_fn;
1424 /* A subroutine of emit_block_move. Copy the data via an explicit
1425 loop. This is used only when libcalls are forbidden. */
1426 /* ??? It'd be nice to copy in hunks larger than QImode. */
1429 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1430 unsigned int align ATTRIBUTE_UNUSED)
1432 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1433 enum machine_mode x_addr_mode
1434 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1435 enum machine_mode y_addr_mode
1436 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1437 enum machine_mode iter_mode;
1439 iter_mode = GET_MODE (size);
1440 if (iter_mode == VOIDmode)
1441 iter_mode = word_mode;
1443 top_label = gen_label_rtx ();
1444 cmp_label = gen_label_rtx ();
1445 iter = gen_reg_rtx (iter_mode);
1447 emit_move_insn (iter, const0_rtx);
1449 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1450 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1451 do_pending_stack_adjust ();
1453 emit_jump (cmp_label);
1454 emit_label (top_label);
1456 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1457 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1459 if (x_addr_mode != y_addr_mode)
1460 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1461 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1463 x = change_address (x, QImode, x_addr);
1464 y = change_address (y, QImode, y_addr);
1466 emit_move_insn (x, y);
1468 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1469 true, OPTAB_LIB_WIDEN);
1471 emit_move_insn (iter, tmp);
1473 emit_label (cmp_label);
1475 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
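/* The RTL emitted above behaves like this C loop (sketch; ITER is the
   fresh pseudo and SIZE the byte count), with the comparison placed at
   the bottom, which is why the code first jumps to CMP_LABEL:

     for (iter = 0; iter < size; iter++)
       ((char *) x)[iter] = ((char *) y)[iter];
*/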
1479 /* Copy all or part of a value X into registers starting at REGNO.
1480 The number of registers to be filled is NREGS. */
1483 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1486 #ifdef HAVE_load_multiple
1494 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1495 x = validize_mem (force_const_mem (mode, x));
1497 /* See if the machine can do this with a load multiple insn. */
1498 #ifdef HAVE_load_multiple
1499 if (HAVE_load_multiple)
1501 last = get_last_insn ();
1502 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1510 delete_insns_since (last);
1514 for (i = 0; i < nregs; i++)
1515 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1516 operand_subword_force (x, i, mode));
1519 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1520 The number of registers to be filled is NREGS. */
1523 move_block_from_reg (int regno, rtx x, int nregs)
1530 /* See if the machine can do this with a store multiple insn. */
1531 #ifdef HAVE_store_multiple
1532 if (HAVE_store_multiple)
1534 rtx last = get_last_insn ();
1535 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1543 delete_insns_since (last);
1547 for (i = 0; i < nregs; i++)
1549 rtx tem = operand_subword (x, i, 1, BLKmode);
1553 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1557 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1558 ORIG, where ORIG is a non-consecutive group of registers represented by
1559 a PARALLEL. The clone is identical to the original except in that the
1560 original set of registers is replaced by a new set of pseudo registers.
1561 The new set has the same modes as the original set. */
1564 gen_group_rtx (rtx orig)
1569 gcc_assert (GET_CODE (orig) == PARALLEL);
1571 length = XVECLEN (orig, 0);
1572 tmps = XALLOCAVEC (rtx, length);
1574 /* Skip a NULL entry in first slot. */
1575 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1580 for (; i < length; i++)
1582 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1583 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1585 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1588 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
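/* Illustrative sketch: a register group handled here is a PARALLEL of
   (reg, byte-offset) pairs, e.g.

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   gen_group_rtx rebuilds the same shape over fresh pseudo registers.  */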
1591 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1592 except that values are placed in TMPS[i], and must later be moved
1593 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1596 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1600 enum machine_mode m = GET_MODE (orig_src);
1602 gcc_assert (GET_CODE (dst) == PARALLEL);
1605 && !SCALAR_INT_MODE_P (m)
1606 && !MEM_P (orig_src)
1607 && GET_CODE (orig_src) != CONCAT)
1609 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1610 if (imode == BLKmode)
1611 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1613 src = gen_reg_rtx (imode);
1614 if (imode != BLKmode)
1615 src = gen_lowpart (GET_MODE (orig_src), src);
1616 emit_move_insn (src, orig_src);
1617 /* ...and back again. */
1618 if (imode != BLKmode)
1619 src = gen_lowpart (imode, src);
1620 emit_group_load_1 (tmps, dst, src, type, ssize);
1624 /* Check for a NULL entry, used to indicate that the parameter goes
1625 both on the stack and in registers. */
1626 if (XEXP (XVECEXP (dst, 0, 0), 0))
1631 /* Process the pieces. */
1632 for (i = start; i < XVECLEN (dst, 0); i++)
1634 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1635 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1636 unsigned int bytelen = GET_MODE_SIZE (mode);
1639 /* Handle trailing fragments that run over the size of the struct. */
1640 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1642 /* Arrange to shift the fragment to where it belongs.
1643 extract_bit_field loads to the lsb of the reg. */
1645 #ifdef BLOCK_REG_PADDING
1646 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1647 == (BYTES_BIG_ENDIAN ? upward : downward)
1652 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1653 bytelen = ssize - bytepos;
1654 gcc_assert (bytelen > 0);
1657 /* If we won't be loading directly from memory, protect the real source
1658 from strange tricks we might play; but make sure that the source can
1659 be loaded directly into the destination. */
1661 if (!MEM_P (orig_src)
1662 && (!CONSTANT_P (orig_src)
1663 || (GET_MODE (orig_src) != mode
1664 && GET_MODE (orig_src) != VOIDmode)))
1666 if (GET_MODE (orig_src) == VOIDmode)
1667 src = gen_reg_rtx (mode);
1669 src = gen_reg_rtx (GET_MODE (orig_src));
1671 emit_move_insn (src, orig_src);
1674 /* Optimize the access just a bit. */
1676 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1677 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1678 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1679 && bytelen == GET_MODE_SIZE (mode))
1681 tmps[i] = gen_reg_rtx (mode);
1682 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1684 else if (COMPLEX_MODE_P (mode)
1685 && GET_MODE (src) == mode
1686 && bytelen == GET_MODE_SIZE (mode))
1687 /* Let emit_move_complex do the bulk of the work. */
1689 else if (GET_CODE (src) == CONCAT)
1691 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1692 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1694 if ((bytepos == 0 && bytelen == slen0)
1695 || (bytepos != 0 && bytepos + bytelen <= slen))
1697 /* The following assumes that the concatenated objects all
1698 have the same size. In this case, a simple calculation
1699 can be used to determine the object and the bit field
1701 tmps[i] = XEXP (src, bytepos / slen0);
1702 if (! CONSTANT_P (tmps[i])
1703 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1704 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1705 (bytepos % slen0) * BITS_PER_UNIT,
1706 1, false, NULL_RTX, mode, mode);
1712 gcc_assert (!bytepos);
1713 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1714 emit_move_insn (mem, src);
1715 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1716 0, 1, false, NULL_RTX, mode, mode);
1719 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1720 SIMD register, which is currently broken. While we get GCC
1721 to emit proper RTL for these cases, let's dump to memory. */
1722 else if (VECTOR_MODE_P (GET_MODE (dst))
1725 int slen = GET_MODE_SIZE (GET_MODE (src));
1728 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1729 emit_move_insn (mem, src);
1730 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1732 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1733 && XVECLEN (dst, 0) > 1)
1734 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1735 else if (CONSTANT_P (src))
1737 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1745 gcc_assert (2 * len == ssize);
1746 split_double (src, &first, &second);
1753 else if (REG_P (src) && GET_MODE (src) == mode)
1756 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1757 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1761 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1762 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1766 /* Emit code to move a block SRC of type TYPE to a block DST,
1767 where DST is non-consecutive registers represented by a PARALLEL.
1768 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1772 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1777 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1778 emit_group_load_1 (tmps, dst, src, type, ssize);
1780 /* Copy the extracted pieces into the proper (probable) hard regs. */
1781 for (i = 0; i < XVECLEN (dst, 0); i++)
1783 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1786 emit_move_insn (d, tmps[i]);
1790 /* Similar, but load SRC into new pseudos in a format that looks like
1791 PARALLEL. This can later be fed to emit_group_move to get things
1792 in the right place. */
1795 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1800 vec = rtvec_alloc (XVECLEN (parallel, 0));
1801 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1803 /* Convert the vector to look just like the original PARALLEL, except
1804 with the computed values. */
1805 for (i = 0; i < XVECLEN (parallel, 0); i++)
1807 rtx e = XVECEXP (parallel, 0, i);
1808 rtx d = XEXP (e, 0);
1812 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1813 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1815 RTVEC_ELT (vec, i) = e;
1818 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1821 /* Emit code to move a block SRC to block DST, where SRC and DST are
1822 non-consecutive groups of registers, each represented by a PARALLEL. */
1825 emit_group_move (rtx dst, rtx src)
1829 gcc_assert (GET_CODE (src) == PARALLEL
1830 && GET_CODE (dst) == PARALLEL
1831 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1833 /* Skip first entry if NULL. */
1834 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1835 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1836 XEXP (XVECEXP (src, 0, i), 0));
1839 /* Move a group of registers represented by a PARALLEL into pseudos. */
1842 emit_group_move_into_temps (rtx src)
1844 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1847 for (i = 0; i < XVECLEN (src, 0); i++)
1849 rtx e = XVECEXP (src, 0, i);
1850 rtx d = XEXP (e, 0);
1853 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1854 RTVEC_ELT (vec, i) = e;
1857 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1860 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1861 where SRC is non-consecutive registers represented by a PARALLEL.
1862 SSIZE represents the total size of block ORIG_DST, or -1 if not
1866 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1869 int start, finish, i;
1870 enum machine_mode m = GET_MODE (orig_dst);
1872 gcc_assert (GET_CODE (src) == PARALLEL);
1874 if (!SCALAR_INT_MODE_P (m)
1875 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1877 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1878 if (imode == BLKmode)
1879 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1881 dst = gen_reg_rtx (imode);
1882 emit_group_store (dst, src, type, ssize);
1883 if (imode != BLKmode)
1884 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1885 emit_move_insn (orig_dst, dst);
1889 /* Check for a NULL entry, used to indicate that the parameter goes
1890 both on the stack and in registers. */
1891 if (XEXP (XVECEXP (src, 0, 0), 0))
1895 finish = XVECLEN (src, 0);
1897 tmps = XALLOCAVEC (rtx, finish);
1899 /* Copy the (probable) hard regs into pseudos. */
1900 for (i = start; i < finish; i++)
1902 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1903 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1905 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1906 emit_move_insn (tmps[i], reg);
1912 /* If we won't be storing directly into memory, protect the real destination
1913 from strange tricks we might play. */
1915 if (GET_CODE (dst) == PARALLEL)
1919 /* We can get a PARALLEL dst if there is a conditional expression in
1920 a return statement. In that case, the dst and src are the same,
1921 so no action is necessary. */
1922 if (rtx_equal_p (dst, src))
1925 /* It is unclear if we can ever reach here, but we may as well handle
1926 it. Allocate a temporary, and split this into a store/load to/from
1929 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1930 emit_group_store (temp, src, type, ssize);
1931 emit_group_load (dst, temp, type, ssize);
1934 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1936 enum machine_mode outer = GET_MODE (dst);
1937 enum machine_mode inner;
1938 HOST_WIDE_INT bytepos;
1942 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1943 dst = gen_reg_rtx (outer);
1945 /* Make life a bit easier for combine. */
1946 /* If the first element of the vector is the low part
1947 of the destination mode, use a paradoxical subreg to
1948 initialize the destination. */
1951 inner = GET_MODE (tmps[start]);
1952 bytepos = subreg_lowpart_offset (inner, outer);
1953 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1955 temp = simplify_gen_subreg (outer, tmps[start],
1959 emit_move_insn (dst, temp);
1966 /* If the first element wasn't the low part, try the last. */
1968 && start < finish - 1)
1970 inner = GET_MODE (tmps[finish - 1]);
1971 bytepos = subreg_lowpart_offset (inner, outer);
1972 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1974 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1978 emit_move_insn (dst, temp);
1985 /* Otherwise, simply initialize the result to zero. */
1987 emit_move_insn (dst, CONST0_RTX (outer));
1990 /* Process the pieces. */
1991 for (i = start; i < finish; i++)
1993 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1994 enum machine_mode mode = GET_MODE (tmps[i]);
1995 unsigned int bytelen = GET_MODE_SIZE (mode);
1996 unsigned int adj_bytelen = bytelen;
1999 /* Handle trailing fragments that run over the size of the struct. */
2000 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2001 adj_bytelen = ssize - bytepos;
2003 if (GET_CODE (dst) == CONCAT)
2005 if (bytepos + adj_bytelen
2006 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2007 dest = XEXP (dst, 0);
2008 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2010 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2011 dest = XEXP (dst, 1);
2015 enum machine_mode dest_mode = GET_MODE (dest);
2016 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2018 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2020 if (GET_MODE_ALIGNMENT (dest_mode)
2021 >= GET_MODE_ALIGNMENT (tmp_mode))
2023 dest = assign_stack_temp (dest_mode,
2024 GET_MODE_SIZE (dest_mode),
2026 emit_move_insn (adjust_address (dest,
2034 dest = assign_stack_temp (tmp_mode,
2035 GET_MODE_SIZE (tmp_mode),
2037 emit_move_insn (dest, tmps[i]);
2038 dst = adjust_address (dest, dest_mode, bytepos);
2044 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2046 /* store_bit_field always takes its value from the lsb.
2047 Move the fragment to the lsb if it's not already there. */
2049 #ifdef BLOCK_REG_PADDING
2050 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
2051 == (BYTES_BIG_ENDIAN
? upward
: downward
)
2057 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2058 tmps
[i
] = expand_shift (RSHIFT_EXPR
, mode
, tmps
[i
],
2059 build_int_cst (NULL_TREE
, shift
),
2062 bytelen
= adj_bytelen
;
2065 /* Optimize the access just a bit. */
2067 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
2068 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
2069 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2070 && bytelen
== GET_MODE_SIZE (mode
))
2071 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2073 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2077 /* Copy from the pseudo into the (probable) hard reg. */
2078 if (orig_dst
!= dst
)
2079 emit_move_insn (orig_dst
, dst
);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode copy_mode;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (tgtblk))
    {
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
	copy_mode = mem_mode;
    }

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1, false,
					  NULL_RTX, copy_mode, copy_mode));
    }

  return tgtblk;
}
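
/* Worked example (illustrative): for a 6-byte struct on a 32-bit
   big-endian target (UNITS_PER_WORD == 4, BITS_PER_WORD == 32) that is
   not returned in the msb, bytes % UNITS_PER_WORD == 2, so
   PADDING_CORRECTION = 32 - 2 * 8 = 16.  The copy loop therefore starts
   extracting at bit 16 of the source registers (xbitpos == 16),
   skipping the left padding, while storing from bit 0 of the target
   (bitpos == 0).  */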
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! (memsetp
	 ? SET_BY_PIECES_P (len, align)
	 : STORE_BY_PIECES_P (len, align)))
    return 0;

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  mode = VOIDmode;
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = optab_handler (mov_optab, mode);
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
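
/* Illustrative sketch (not part of GCC): a CONSTFUN callback receives
   CONSTFUNDATA and returns an rtx constant for the piece at OFFSET in
   MODE.  The builtin string expanders use callbacks of roughly this
   shape; this hypothetical one serves bytes out of a C string using
   the existing c_readstr helper:

     static rtx
     example_constfun (void *data, HOST_WIDE_INT offset,
		       enum machine_mode mode)
     {
       const char *str = (const char *) data;
       return c_readstr (str + offset, mode);
     }

   can_store_by_pieces simply dry-runs the store loop, asking the
   callback for each piece and checking it with LEGITIMATE_CONSTANT_P
   without emitting any insns.  */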
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  enum machine_mode to_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
  struct store_by_pieces_d data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (memsetp
	      ? SET_BY_PIECES_P (len, align)
	      : STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
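
/* Usage sketch (illustrative only): expanding strcpy (buf, "abc")
   might end up doing something like

     store_by_pieces (dest_mem, 4, builtin_strncpy_read_str,
		      (void *) "abc", align, false, 0);

   with ENDP == 0 to get TO itself back; mempcpy- and stpcpy-style
   expanders pass ENDP == 1 or 2 to get the address just past, or of,
   the last byte written.  The callback and arguments here are for
   illustration, not a literal call site.  */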
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
		   unsigned int align ATTRIBUTE_UNUSED)
{
  enum machine_mode to_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_to_mode_reg (to_addr_mode,
					    plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
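
/* Worked example (illustrative): with LEN == 7 and sufficient
   alignment on a 32-bit target, the loop above first selects SImode
   (4 bytes) and stores one word; max_size then drops to 4, so HImode
   covers the next 2 bytes, and finally QImode covers the last byte:
   three stores in total, largest mode first.  */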
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
		     unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
				   expected_align, expected_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
				    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  return clear_storage_hints (object, size, method, 0, -1);
}
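
/* Usage sketch (illustrative): zeroing a 32-byte BLKmode MEM named
   obj reduces to

     clear_storage (obj, GEN_INT (32), BLOCK_OP_NORMAL);

   which, per the code above, tries clear_by_pieces first, then the
   target's setmem pattern, and finally falls back to a memset
   libcall, in that order.  */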
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}
static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
			unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than
	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
	     the mode mask, as it is returned by the macro, it will
	     definitely be less than the actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (object, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx opsize, opchar;
	  enum machine_mode char_mode;
	  rtx last = get_last_insn ();
	  rtx pat;

	  opsize = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[1].predicate;
	  if (pred != 0 && ! (*pred) (opsize, mode))
	    opsize = copy_to_mode_reg (mode, opsize);

	  opchar = val;
	  char_mode = insn_data[(int) code].operand[2].mode;
	  if (char_mode != VOIDmode)
	    {
	      opchar = convert_to_mode (char_mode, opchar, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (opchar, char_mode))
		opchar = copy_to_mode_reg (char_mode, opchar);
	    }

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
	  else
	    pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
					GEN_INT (expected_align
						 / BITS_PER_UNIT),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  return false;
}
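
/* Worked example (illustrative): for HImode, GET_MODE_MASK >> 1 is
   0x7fff, so a constant SIZE up to 32767 may safely be handed to a
   setmemhi pattern; a larger constant clear must wait for a mode at
   least BITS_PER_WORD wide, where the size check above no longer
   applies.  */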
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
	{
	  emit_move_insn (part, val);
	  return;
	}
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
	return ret;
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, false, NULL_RTX, imode, imode);
}
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
		       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

static rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
	val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
			      GEN_INT (adjust), stack_pointer_rtx,
			      0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
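
/* Worked example (illustrative): storing an SImode value through
   (mem:SI (pre_dec:SI (reg sp))) when pushes are 4 bytes resolves to

     sp = sp + (-4);
     (mem:SI (reg sp)) = y;

   i.e. the auto-decrement becomes an explicit stack adjustment
   followed by a plain store at the new stack pointer, which is the
   PRE_DEC arm of the switch above.  */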
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
		  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			 read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	   && (!STRICT_ALIGNMENT
	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
	 existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
	{
	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
			   BLOCK_OP_NO_LIBCALL);
	  return get_last_insn ();
	}

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
	return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;

  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
			- GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }
  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;
  return false;
}
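
/* Worked example (illustrative): for (subreg:DI (reg:SI) 0) on a
   little-endian 32-bit target, word 1 starts at byte offset 4, which
   is past the 4-byte SImode source, so
   undefined_operand_subword_p (op, 1) is true and the caller below
   skips emitting a move for that word.  */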
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
	 from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
	continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
	 constant.  Otherwise, force it into a register.  Then we must
	 be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
	{
	  y = use_anchored_address (force_const_mem (mode, y));
	  ypart = operand_subword (y, i, 1, mode);
	}
      else if (ypart == 0)
	ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
	return result;
      else
	return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, false);
      if (ret)
	return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	  else
	    y = use_anchored_address (y);
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					 MEM_ADDR_SPACE (x))
	  && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
					MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (LEGITIMATE_CONSTANT_P (y))
    oldcost = rtx_cost (y, SET, speed);
  else
    oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
	    continue;
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
			      SET, speed);
	  if (oldcost < newcost)
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	{
	  trunc_y = force_const_mem (srcmode, trunc_y);
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
			      SET, speed);
	  if (oldcost < newcost)
	    continue;
	  trunc_y = validize_mem (trunc_y);
	}
      else
	continue;

      /* For CSE's benefit, force the compressed constant pool entry
	 into a new pseudo.  This constant may be used in different modes,
	 and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
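
/* Worked example (illustrative): moving the DFmode constant 1.0 can
   often be emitted as (float_extend:DF (mem:SF ...)), since 1.0
   truncates to SFmode exactly and back without loss.  A constant such
   as 0.1 is not exactly representable in SFmode, so
   exact_real_truncate rejects the compression and the full DFmode
   constant is used instead.  */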
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     such a capability.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
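
/* Worked example (illustrative): pushing an HImode value when
   PUSH_ROUNDING rounds 2 bytes up to 4, with downward padding on a
   downward-growing stack: the stack pointer is first adjusted by 4,
   then the 2-byte value is stored at sp + padding_size (sp + 2),
   leaving the pad bytes below the data.  */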
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = GEN_INT (GET_MODE_SIZE (mode));
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 0, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (CONST_INT_P (size))
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (CONST_INT_P (args_so_far))
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (CONST_INT_P (args_so_far))
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 enum machine_mode mode1, rtx str_rtx,
				 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (!BINARY_CLASS_P (src)
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);
  STRIP_NOPS (op0);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
				MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
	return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (TREE_CODE (src))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if (bitpos + bitsize != str_bitsize
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
			    build_int_cst (NULL_TREE, bitpos),
			    NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
      value = convert_modes (GET_MODE (str_rtx),
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
	{
	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
			      - 1);
	  value = expand_and (GET_MODE (str_rtx), value, mask,
			      NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
			    build_int_cst (NULL_TREE, bitpos),
			    NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
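
/* As an illustration of the cases handled above, given

     struct s { unsigned f : 1; unsigned g : 7; } x;

   an assignment such as x.f ^= 1 (a 1-bit field with a constant
   operand) or x.g |= 3 (an IOR with a constant) can be expanded as a
   single read-modify-write xor/ior on the containing word, with no
   separate extract and insert of the field itself.  */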
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  enum machine_mode mode;
  int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
			get_object_alignment (to, BIGGEST_ALIGNMENT)))
	  < (signed) GET_MODE_ALIGNMENT (mode))
      && ((icode = optab_handler (movmisalign_optab, mode))
	  != CODE_FOR_nothing))
    {
      enum machine_mode address_mode, op_mode1;
      rtx insn, reg, op0, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);

      if (TREE_CODE (to) == MEM_REF)
	{
	  addr_space_t as
	    = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
	  tree base = TREE_OPERAND (to, 0);
	  address_mode = targetm.addr_space.address_mode (as);
	  op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	  op0 = convert_memory_address_addr_space (address_mode, op0, as);
	  if (!integer_zerop (TREE_OPERAND (to, 1)))
	    {
	      rtx off
		= immed_double_int_const (mem_ref_offset (to), address_mode);
	      op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
	    }
	  op0 = memory_address_addr_space (mode, op0, as);
	  mem = gen_rtx_MEM (mode, op0);
	  set_mem_attributes (mem, to, 0);
	  set_mem_addr_space (mem, as);
	}
      else if (TREE_CODE (to) == TARGET_MEM_REF)
	{
	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
	  struct mem_address addr;

	  get_address_description (to, &addr);
	  op0 = addr_for_mem_ref (&addr, as, true);
	  op0 = memory_address_addr_space (mode, op0, as);
	  mem = gen_rtx_MEM (mode, op0);
	  set_mem_attributes (mem, to, 0);
	  set_mem_addr_space (mem, as);
	}
      else
	gcc_unreachable ();
      if (TREE_THIS_VOLATILE (to))
	MEM_VOLATILE_P (mem) = 1;

      op_mode1 = insn_data[icode].operand[1].mode;
      if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
	  && op_mode1 != VOIDmode)
	reg = copy_to_mode_reg (op_mode1, reg);

      insn = GEN_FCN (icode) (mem, reg);
      /* The movmisalign<mode> pattern cannot fail, else the assignment would
         silently be omitted.  */
      gcc_assert (insn != NULL_RTX);
      emit_insn (insn);
      return;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
  if (handled_component_p (to)
      /* ???  We only need to handle MEM_REF here if the access is not
         a full access of the base object.  */
      || (TREE_CODE (to) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, true);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      to_rtx = expand_normal (tem);

      /* If the bitfield is volatile, we want to access it in the
	 field's mode, not the computed mode.  */
      if (volatilep
	  && GET_CODE (to_rtx) == MEM
	  && flag_strict_volatile_bitfields > 0)
	to_rtx = adjust_address (to_rtx, mode1, 0);

      if (offset != 0)
	{
	  enum machine_mode address_mode;
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
	  address_mode
	    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
	  if (GET_MODE (offset_rtx) != address_mode)
	    offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (MEM_P (to_rtx)
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* No action is needed if the target is not a memory and the field
	 lies completely outside that target.  This can occur if the source
	 code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
	  && GET_MODE (to_rtx) != BLKmode
	  && (unsigned HOST_WIDE_INT) bitpos
	     >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
	{
	  expand_normal (from);
	  result = NULL;
	}
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
	{
	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
	    {
	      gcc_assert (bitpos == 0);
	      result = store_expr (from, to_rtx, false, nontemporal);
	    }
	  else
	    {
	      gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
	      result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
				   nontemporal);
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);

	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);

	      /* Deal with volatile and readonly fields.  The former is only
		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	      if (component_uses_parent_alias_set (to))
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
					       to_rtx, to, from))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos, mode1, from,
				  TREE_TYPE (tem), get_alias_set (to),
				  nontemporal);
	}

      if (result)
	preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	     && REG_P (DECL_RTL (to)))
	    || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address_addr_space
		      (GET_MODE (to_rtx), value,
		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TYPE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0, nontemporal);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
}
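
/* A source-level sketch of the movmisalign path above: given a packed
   struct such as

     struct p { char c; int i; } __attribute__((packed)) *q;

   the store q->i = 0 addresses an int at a misaligned offset, so on a
   strict-alignment target that provides a movmisalign<mode> pattern
   the assignment may be expanded through that pattern rather than as
   a plain move.  */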
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  enum machine_mode mode = GET_MODE (to), imode;
  enum insn_code code = optab_handler (storent_optab, mode);
  rtx pattern;

  if (code == CODE_FOR_nothing)
    return false;

  imode = insn_data[code].operand[0].mode;
  if (!insn_data[code].operand[0].predicate (to, imode))
    return false;

  imode = insn_data[code].operand[1].mode;
  if (!insn_data[code].operand[1].predicate (from, imode))
    {
      from = copy_to_mode_reg (imode, from);
      if (!insn_data[code].operand[1].predicate (from, imode))
	return false;
    }

  pattern = GEN_FCN (code) (to, from);
  if (pattern == NULL_RTX)
    return false;

  emit_insn (pattern);
  return true;
}
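
/* emit_storent_insn only succeeds on targets that provide a storent
   pattern, e.g. x86's cache-bypassing nontemporal stores (movnti and
   friends) when SSE2 is enabled.  Callers such as store_expr simply
   fall back to an ordinary move when this function returns false.  */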
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = EXPR_LOCATION (exp);

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
			 nontemporal);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
		  nontemporal);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
		  nontemporal);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && GET_MODE_PRECISION (GET_MODE (target))
	     == TYPE_PRECISION (TREE_TYPE (exp)))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    {
	      /* Some types, e.g. Fortran's logical*4, won't have a signed
		 version, so use the mode instead.  */
	      tree ntype
		= (signed_or_unsigned_type_for
		   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
	      if (ntype == NULL)
		ntype = lang_hooks.types.type_for_mode
		  (TYPE_MODE (TREE_TYPE (exp)),
		   SUBREG_PROMOTED_UNSIGNED_P (target));

	      exp = fold_convert_loc (loc, ntype, exp);
	    }

	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
				  (GET_MODE (SUBREG_REG (target)),
				   SUBREG_PROMOTED_UNSIGNED_P (target)),
				  exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
	    || (TREE_CODE (exp) == MEM_REF
		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == STRING_CST
		&& integer_zerop (TREE_OPERAND (exp, 1))))
	   && !nontemporal && !call_param_p
	   && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
	goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
	goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
	goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
	{
	  str_copy_len += STORE_MAX_PIECES - 1;
	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
	}
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
				CONST_CAST (char *, TREE_STRING_POINTER (str)),
				MEM_ALIGN (target), false))
	goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
				  str_copy_len, builtin_strncpy_read_str,
				  CONST_CAST (char *,
					      TREE_STRING_POINTER (str)),
				  MEM_ALIGN (target), false,
				  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
	clear_storage (adjust_address (dest_mem, BLKmode, 0),
		       GEN_INT (exp_len - str_copy_len),
		       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal store, force the value to
	 register first.  */
      tmp_target = nontemporal ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
	  if (GET_MODE (target) == BLKmode
	      && GET_MODE (temp) == BLKmode)
	    emit_block_move (target, temp, expr_size (exp),
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM
			      : BLOCK_OP_NORMAL));
	  else if (GET_MODE (target) == BLKmode)
	    store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
			     0, GET_MODE (temp), temp);
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (CONST_INT_P (size)
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      enum machine_mode pointer_mode
		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
	      enum machine_mode address_mode
		= targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));

	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop_loc (loc, MIN_EXPR,
				  make_tree (sizetype, size),
				  size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in pointer_mode.  */
	      if (CONST_INT_P (copy_size_rtx))
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  if (GET_MODE (copy_size_rtx) != address_mode)
		    copy_size_rtx = convert_to_mode (address_mode,
						     copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, TREE_TYPE (exp),
			 int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else if (nontemporal
	       && emit_storent_insn (target, temp))
	/* If we managed to emit a nontemporal store, there is nothing else to
	   do.  */
	;
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
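
/* For example, with the STRING_CST optimization above,

     char buf[16] = "hi";

   may store the string bytes (rounded up to a store_by_pieces chunk)
   directly into BUF and then let clear_storage zero the remaining
   tail, rather than copying a full 16-byte constant out of memory.  */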
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_elt_count,
			    bool *p_must_clear)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, elt_count;
  tree value, purpose;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  elt_count = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
	    mult = (tree_low_cst (hi_index, 1)
		    - tree_low_cst (lo_index, 1) + 1);
	}

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, ic = 0;

	    bool const_elt_p
	      = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);

	    nz_elts += mult * nz;
	    elt_count += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  elt_count += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case VECTOR_CST:
	  {
	    tree v;
	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
	      {
		if (!initializer_zerop (TREE_VALUE (v)))
		  nz_elts += mult;
		elt_count += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
	    if (tc < 1)
	      tc = 1;
	    nz_elts += mult * tc;
	    elt_count += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
			!= NULL_TREE;
	  }
	  break;
	}
    }

  if (!*p_must_clear
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
    {
      tree init_sub_type;
      bool clear_this = true;

      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
	{
	  /* We don't expect more than one element of the union to be
	     initialized.  Not sure what we should do otherwise... */
	  gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
		      == 1);

	  init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
						CONSTRUCTOR_ELTS (ctor),
						0)->value);

	  /* ??? We could look at each element of the union, and find the
	     largest element.  Which would avoid comparing the size of the
	     initialized element against any tail padding in the union.
	     Doesn't seem worth the effort...  */
	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
				TYPE_SIZE (init_sub_type)) == 1)
	    {
	      /* And now we have to find out if the element itself is fully
		 constructed.  E.g. for union { struct { int a, b; } s; } u
		 = { .s = { .a = 1 } }.  */
	      if (elt_count == count_type_elements (init_sub_type, false))
		clear_this = false;
	    }
	}

      *p_must_clear = clear_this;
    }

  *p_nz_elts += nz_elts;
  *p_elt_count += elt_count;

  return const_p;
}
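
/* E.g. for

     struct s { int a, b, c; } x = { 1, 0, 2 };

   the walk above reports an element count of 3 and 2 nonzero
   elements; a RANGE_EXPR index such as [0 ... 9] = 1 contributes its
   counts with MULT == 10.  */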
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * if a type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *p_must_clear.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_elt_count,
			  bool *p_must_clear)
{
  *p_nz_elts = 0;
  *p_elt_count = 0;
  *p_must_clear = false;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count,
				     p_must_clear);
}
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
   array member at the end of the structure.  */

HOST_WIDE_INT
count_type_elements (const_tree type, bool allow_flexarr)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree telts = array_type_nelts (type);
	if (telts && host_integerp (telts, 1))
	  {
	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
	    if (n == 0)
	      return 0;
	    else if (max / n > m)
	      return n * m;
	  }
	return -1;
      }

    case RECORD_TYPE:
      {
	HOST_WIDE_INT n = 0, t;
	tree f;

	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      t = count_type_elements (TREE_TYPE (f), false);
	      if (t < 0)
		{
		  /* Check for structures with flexible array member.  */
		  tree tf = TREE_TYPE (f);
		  if (allow_flexarr
		      && DECL_CHAIN (f) == NULL
		      && TREE_CODE (tf) == ARRAY_TYPE
		      && TYPE_DOMAIN (tf)
		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
		      && int_size_in_bytes (type) >= 0)
		    break;

		  return -1;
		}
	      n += t;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return -1;

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
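
/* E.g. count_type_elements returns 6 for

     struct s { int a[4]; _Complex double z; };

   (four array scalars plus two for the complex parts), and -1 for a
   variable-length or flexible array whose element count is unknown.  */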
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      if (must_clear)
	return 1;

      elts = count_type_elements (TREE_TYPE (exp), false);

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}
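
/* E.g. int v[8] = { [7] = 1 }; has one nonzero element out of eight,
   so mostly_zeros_p returns true (1 < 8 / 4) and store_constructor
   will clear the whole array first and then store only the single
   nonzero element.  */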
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared,
			 alias_set_type alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
		      != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (REG_P (target) && !cleared)
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (host_integerp (DECL_SIZE (field), 1))
	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
	    else
	      bitsize = -1;

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (host_integerp (offset, 0)
		&& host_integerp (bit_position (field), 0))
	      {
		bitpos = int_bit_position (field);
		offset = 0;
	      }
	    else
	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	    if (offset)
	      {
		enum machine_mode address_mode;
		rtx offset_rtx;

		offset
		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						    make_tree (TREE_TYPE (exp),
							       target));
		offset_rtx = expand_normal (offset);
		gcc_assert (MEM_P (to_rtx));

		address_mode
		  = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
		if (GET_MODE (offset_rtx) != address_mode)
		  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

		to_rtx = offset_address (to_rtx, offset_rtx,
					 highest_pow2_factor (offset));
	      }

#ifdef WORD_REGISTER_OPERATIONS
	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_size
		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }
#endif

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
				     value, type, cleared,
				     get_alias_set (TREE_TYPE (field)));
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
			  && host_integerp (TYPE_MAX_VALUE (domain), 0));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! host_integerp (lo_index, 1)
			|| ! host_integerp (hi_index, 1))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_low_cst (hi_index, 1)
				       - tree_low_cst (lo_index, 1) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && host_integerp (lo_index, 0)
		    && host_integerp (hi_index, 0)
		    && (lo = tree_low_cst (lo_index, 0),
			hi = tree_low_cst (hi_index, 0),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (host_integerp (TYPE_SIZE (elttype), 1)
			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, type, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx loop_start = gen_label_rtx ();
		    rtx loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0, false);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end, -1);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! host_integerp (index, 0))
		     || ! host_integerp (TYPE_SIZE (elttype), 1))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_low_cst (index, 0) - minelt)
			    * tree_low_cst (TYPE_SIZE (elttype), 1));
		else
		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 type, cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = 0;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;
	alias_set_type alias;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) optab_handler (vec_init_optab, mode);
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_low_cst
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype), 0), 1);

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_low_cst (ce->index, 1);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* Vector CONSTRUCTORs should only be built from smaller
		   vectors in the case of BLKmode vectors.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos,
					 value_mode, value, type,
					 cleared, alias);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
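
/* To illustrate the RANGE_EXPR handling above: for

     int a[100] = { [0 ... 49] = 1 };

   the 50-element range fails the size test for unrolling, so a loop
   with an index pseudo is emitted, whereas [0 ... 1] = 1 would be
   unrolled into two individual stores.  */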
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     enum machine_mode mode, tree exp, tree type,
	     alias_set_type alias_set, bool nontemporal)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
		   nontemporal);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (bitsize >= 0
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TEMP and TARGET are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  Likewise
	 for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && (bitpos % BITS_PER_UNIT) == 0
		  && (bitsize % BITS_PER_UNIT) == 0)))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && (bitpos % BITS_PER_UNIT) == 0);

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0, nontemporal);
    }
}
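
/* E.g. storing to x.g in

     struct s { unsigned f : 3; unsigned g : 13; } x;

   reaches the bit-field branch above (MODE == VOIDmode) and goes
   through store_bit_field; a plain, aligned int member would instead
   take the adjust_address/store_expr path at the end.  */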
5958 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5959 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5960 codes and find the ultimate containing object, which we return.
5962 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5963 bit position, and *PUNSIGNEDP to the signedness of the field.
5964 If the position of the field is variable, we store a tree
5965 giving the variable offset (in units) in *POFFSET.
5966 This offset is in addition to the bit position.
5967 If the position is not variable, we store 0 in *POFFSET.
5969 If any of the extraction expressions is volatile,
5970 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5972 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5973 Otherwise, it is a mode that can be used to access the field.
5975 If the field describes a variable-sized object, *PMODE is set to
5976 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5977 this case, but the address of the object can be found.
5979 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5980 look through nodes that serve as markers of a greater alignment than
5981 the one that can be deduced from the expression. These nodes make it
5982 possible for front-ends to prevent temporaries from being created by
5983 the middle-end on alignment considerations. For that purpose, the
5984 normal operating mode at high-level is to always pass FALSE so that
5985 the ultimate containing object is really returned; moreover, the
5986 associated predicate handled_component_p will always return TRUE
5987 on these nodes, thus indicating that they are essentially handled
5988 by get_inner_reference. TRUE should only be passed when the caller
5989 is scanning the expression in order to build another representation
5990 and specifically knows how to handle these nodes; as such, this is
5991 the normal operating mode in the RTL expanders. */
5994 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5995 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5996 enum machine_mode
*pmode
, int *punsignedp
,
5997 int *pvolatilep
, bool keep_aligning
)
6000 enum machine_mode mode
= VOIDmode
;
6001 bool blkmode_bitfield
= false;
6002 tree offset
= size_zero_node
;
6003 double_int bit_offset
= double_int_zero
;
6005 /* First get the mode, signedness, and size. We do this from just the
6006 outermost expression. */
6008 if (TREE_CODE (exp
) == COMPONENT_REF
)
6010 tree field
= TREE_OPERAND (exp
, 1);
6011 size_tree
= DECL_SIZE (field
);
6012 if (!DECL_BIT_FIELD (field
))
6013 mode
= DECL_MODE (field
);
6014 else if (DECL_MODE (field
) == BLKmode
)
6015 blkmode_bitfield
= true;
6016 else if (TREE_THIS_VOLATILE (exp
)
6017 && flag_strict_volatile_bitfields
> 0)
6018 /* Volatile bitfields should be accessed in the mode of the
6019 field's type, not the mode computed based on the bit
6021 mode
= TYPE_MODE (DECL_BIT_FIELD_TYPE (field
));
6023 *punsignedp
= DECL_UNSIGNED (field
);
6025 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
6027 size_tree
= TREE_OPERAND (exp
, 1);
6028 *punsignedp
= (! INTEGRAL_TYPE_P (TREE_TYPE (exp
))
6029 || TYPE_UNSIGNED (TREE_TYPE (exp
)));
6031 /* For vector types, with the correct size of access, use the mode of
6033 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == VECTOR_TYPE
6034 && TREE_TYPE (exp
) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
6035 && tree_int_cst_equal (size_tree
, TYPE_SIZE (TREE_TYPE (exp
))))
6036 mode
= TYPE_MODE (TREE_TYPE (exp
));
6040 mode
= TYPE_MODE (TREE_TYPE (exp
));
6041 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
6043 if (mode
== BLKmode
)
6044 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
6046 *pbitsize
= GET_MODE_BITSIZE (mode
);
6051 if (! host_integerp (size_tree
, 1))
6052 mode
= BLKmode
, *pbitsize
= -1;
6054 *pbitsize
= tree_low_cst (size_tree
, 1);
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset
	    = double_int_add (bit_offset,
			      tree_to_double_int (TREE_OPERAND (exp, 2)));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset = double_int_add (bit_offset,
					 tree_to_double_int
					   (DECL_FIELD_BIT_OFFSET (field)));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset = double_int_add (bit_offset,
				       uhwi_to_double_int (*pbitsize));
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  double_int boff, coff = mem_ref_offset (exp);
		  boff = double_int_lshift (coff,
					    BITS_PER_UNIT == 8
					    ? 3 : exact_log2 (BITS_PER_UNIT),
					    HOST_BITS_PER_DOUBLE_INT, true);
		  bit_offset = double_int_add (bit_offset, boff);
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (host_integerp (offset, 0))
    {
      double_int tem = double_int_lshift (tree_to_double_int (offset),
					  BITS_PER_UNIT == 8
					  ? 3 : exact_log2 (BITS_PER_UNIT),
					  HOST_BITS_PER_DOUBLE_INT, true);
      tem = double_int_add (tem, bit_offset);
      if (double_int_fits_in_shwi_p (tem))
	{
	  *pbitpos = double_int_to_shwi (tem);
	  *poffset = offset = NULL_TREE;
	}
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      *pbitpos = double_int_to_shwi (bit_offset);
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
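
/* Usage sketch (illustrative only; EXAMPLE_REF and the function below are
   hypothetical, not part of this file): a typical caller decomposes a
   reference such as a.b[i].c into a base object plus bit position and size
   before deciding how to expand the access.  */
#if 0
static void
example_decompose (tree example_ref)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  tree base;

  base = get_inner_reference (example_ref, &bitsize, &bitpos, &offset,
			      &mode, &unsignedp, &volatilep, false);
  /* BASE is the ultimate containing object; the accessed bits start at
     BITPOS (plus OFFSET bytes when the offset is not constant) and span
     BITSIZE bits.  */
}
#endif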
/* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
   ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
   EXP is marked as PACKED.  */

bool
contains_packed_reference (const_tree exp)
{
  bool packed_p = false;

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    packed_p = DECL_PACKED (field)
		       || TYPE_PACKED (TREE_TYPE (field))
		       || TYPE_PACKED (TREE_TYPE (exp));
	    if (packed_p)
	      goto done;
	  }
	  break;

	case BIT_FIELD_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  break;

	default:
	  goto done;
	}
      exp = TREE_OPERAND (exp, 0);
    }
 done:
  return packed_p;
}
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
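
/* Worked example (illustrative; the helper below is hypothetical): for an
   ARRAY_REF a[i] with lower bound L and element size S bytes, the byte
   offset computed by the expanders is (i - L) * S, using the two accessors
   defined above.  */
#if 0
static tree
example_array_byte_offset (tree array_ref)
{
  tree index = TREE_OPERAND (array_ref, 1);
  tree low = array_ref_low_bound (array_ref);
  tree size = array_ref_element_size (array_ref);

  if (! integer_zerop (low))
    index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low);
  return size_binop (MULT_EXPR, fold_convert (sizetype, index), size);
}
#endif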
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfield components, so we need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1),
						     NULL_RTX),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to
     be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
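
/* Usage sketch (illustrative; the helper below is hypothetical):
   force_operand is typically used to legitimize an address computation,
   e.g. (plus (reg) (mult (reg) (const_int 4))), emitting the arithmetic
   insns and returning a simple operand.  */
#if 0
static rtx
example_legitimize_sum (rtx base, rtx index)
{
  rtx sum = gen_rtx_PLUS (Pmode, base,
			  gen_rtx_MULT (Pmode, index, GEN_INT (4)));
  return force_operand (sum, NULL_RTX);
}
#endif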
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case BIT_AND_EXPR:
      /* The highest power of two of a bit-and expression is the maximum of
	 that of its operands.  We typically get here for a complex LHS and
	 a constant negative power of two on the RHS to force an explicit
	 alignment, so don't bother looking at the LHS.  */
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    CASE_CONVERT:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
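
/* Worked example (illustrative): for EXP = i * 12 + 16, the MULT_EXPR case
   gives highest_pow2_factor (i) * highest_pow2_factor (12) = 1 * 4 = 4,
   the INTEGER_CST case gives 16 for the constant, and the PLUS_EXPR case
   returns MIN (4, 16) = 4, so the whole expression is known to be a
   multiple of 4.  */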
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
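
/* Usage sketch (illustrative): the binary-operator cases later in this
   file expand both operands with one call and hand the pair to an optab,
   e.g.:

     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
		      EXPAND_NORMAL);

   after which op0/op1 hold the rtx values of the two operands, with exp0
   protected from clobbering by exp1 when flag_evaluation_order is set.  */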
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    return XEXP (expand_expr_constant (exp, 0, modifier), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
			tem,
			double_int_to_tree (sizetype, mem_ref_offset (exp)));
	return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */
	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
	    {
	      assemble_external (exp);
	      TREE_USED (exp) = 1;
	    }

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = gen_rtx_PLUS (tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  enum machine_mode address_mode = Pmode;
  enum machine_mode pointer_mode = ptr_mode;
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}
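
/* Sketch (illustrative): taking &a[i].f funnels through
   expand_expr_addr_expr_1 above, which conceptually computes

     result = address of a;
     result += i * sizeof (a[0]);        -- the variable OFFSET part
     result += byte offset of field f;   -- the constant BITPOS part

   converting between address spaces and modes as needed along the way.  */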
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  enum machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT or CONST_DOUBLE when we
     fold.  Likewise, if we have a target we can use, it is best to
     store directly into the target unless the type is large enough
     that memcpy will be used.  If we are making an initializer and
     all operands are constant, put it in memory as well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (host_integerp (TYPE_SIZE_UNIT (type), 1)
	       && (! MOVE_BY_PIECES_P
		   (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target
	= assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
		       0, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp));
  return target;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural
   mode.  TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend
   insns.  This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      location_t saved_curr_loc = get_curr_insn_source_location ();
      tree saved_block = get_curr_insn_block ();
      input_location = EXPR_LOCATION (exp);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
      set_curr_insn_block (saved_block);
      set_curr_insn_source_location (saved_curr_loc);
    }
  else
    ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

  return ret;
}
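
/* Usage sketch (illustrative; hypothetical calls): the MODIFIER argument
   selects how much legitimization expand_expr performs.  */
#if 0
  /* Normal expansion: emit insns, return a legitimate operand.  */
  x = expand_expr (exp, target, mode, EXPAND_NORMAL);
  /* Address arithmetic may come back as a (PLUS ...) nest instead of
     being forced into a register; used while building addresses.  */
  y = expand_expr (exp, target, mode, EXPAND_SUM);
  /* For static initializers, where no insns may be emitted at all.  */
  z = expand_expr (exp, target, mode, EXPAND_INITIALIZER);
#endif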
static rtx
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (treeop0);

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (treeop0, target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, type, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (treeop0,
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM,
			false);
	  else
	    {
	      gcc_assert (REG_P (target));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE
						    (treeop0))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, TYPE_MODE (valtype), treeop0,
			   type, 0, false);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
	{
	  op0 = expand_expr (treeop0, target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (treeop0, NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (treeop0);
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
				       subreg_lowpart_offset (mode,
							      inner_mode));
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE
					      (treeop0)));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);
	addr_space_t as_to;
	addr_space_t as_from;

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  {
	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	    gcc_assert (op0);
	    return op0;
	  }

	/* For disjoint address spaces, converting anything but
	   a null pointer invokes undefined behaviour.  We simply
	   always return a null pointer here.  */
	return CONST0_RTX (mode);
      }
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be
	 different, expand is able to handle this correctly and get the
	 correct result out of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
	  && TREE_CODE (treeop1) == VAR_DECL
	  && (DECL_RTL (treeop1) == frame_pointer_rtx
	      || DECL_RTL (treeop1) == stack_pointer_rtx
	      || DECL_RTL (treeop1) == arg_pointer_rtx))
	{
	  tree t = treeop1;

	  treeop1 = TREE_OPERAND (treeop0, 0);
	  TREE_OPERAND (treeop0, 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (treeop0) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (treeop1))
	    {
	      rtx constant_part;

	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (treeop0),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (treeop1)));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (treeop1) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (treeop0))
	    {
	      rtx constant_part;

	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (treeop1, NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (treeop1),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (treeop0)));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* Use TER to expand pointer addition of a negated value
	 as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
	  && TREE_CODE (treeop1) == SSA_NAME
	  && TYPE_MODE (TREE_TYPE (treeop0))
	     == TYPE_MODE (TREE_TYPE (treeop1)))
	{
	  gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
	  if (def)
	    {
	      treeop1 = gimple_assign_rhs1 (def);
	      code = MINUS_EXPR;
	      goto do_minus;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, EXPAND_NORMAL);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));

    case MINUS_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (treeop0)
	  && really_constant_p (treeop1))
	{
	  expand_operands (treeop0, treeop1,
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (CONST_INT_P (op1))
	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (ops->op2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
					  target, unsignedp);
      return target;

    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	{
	  tree t1 = treeop0;
	  treeop0 = treeop1;
	  treeop1 = t1;
	}

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
	{
	  enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
	  this_optab = usmul_widen_optab;
	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
	    {
	      if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
		{
		  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
		    expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
				     EXPAND_NORMAL);
		  else
		    expand_operands (treeop0, treeop1, subtarget, &op1, &op0,
				     EXPAND_NORMAL);
		  goto binop3;
		}
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
	{
	  tree op0type = TREE_TYPE (treeop0);
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
	    {
	      if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
		{
		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  temp = expand_widening_mult (mode, op0, op1, target,
					       unsignedp, this_optab);
		  return REDUCE_BIT_FIELD (temp);
		}
	      if (optab_handler (other_optab, mode) != CODE_FOR_nothing
		  && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (treeop0);
		  if (TREE_CODE (treeop1) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (treeop1), unsignedp);
		  else
		    op1 = expand_normal (treeop1);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target,
					    unsignedp));
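
      /* Sketch (illustrative; the helper below is hypothetical): the
	 widening-multiply path above turns (int64_t) a * (int64_t) b with
	 32-bit A and B into a single widening pattern when the target
	 provides one, instead of extending both operands and doing a full
	 64x64 multiply.  The check mirrors the conditions used above.  */
#if 0
static bool
example_have_widening_mult (enum machine_mode wider,
			    enum machine_mode inner, bool uns)
{
  optab op = uns ? umul_widen_optab : smul_widen_optab;
  return (wider == GET_MODE_2XWIDER_MODE (inner)
	  && optab_handler (op, wider) != CODE_FOR_nothing);
}
#endif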
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	{
	  tree t1 = treeop0;
	  treeop0 = treeop1;
	  treeop1 = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (treeop1, 0))
	{
	  tree exp1 = treeop1;

	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target,
					    unsignedp));

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, treeop0, 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
		       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    /* ??? Same problem as in expmed.c: emit_conditional_move
	       forces a stack adjustment via compare_from_rtx, and we
	       lose the stack adjustment if the sequence we are about
	       to create is discarded.  */
	    do_pending_stack_adjust ();

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn != NULL_RTX)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
				 -1);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
7971 op0
= expand_expr (treeop0
, subtarget
,
7972 VOIDmode
, EXPAND_NORMAL
);
7973 if (modifier
== EXPAND_STACK_PARM
)
7975 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
7979 /* ??? Can optimize bitwise operations with one arg constant.
7980 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7981 and (a bitwise1 b) bitwise2 b (etc)
7982 but that is probably not worth while. */
7984 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7985 boolean values when we want in all cases to compute both of them. In
7986 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7987 as actual zero-or-1 values and then bitwise anding. In cases where
7988 there cannot be any side effects, better code would be made by
7989 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7990 how to recognize those cases. */
7992 case TRUTH_AND_EXPR
:
7993 code
= BIT_AND_EXPR
;
7998 code
= BIT_IOR_EXPR
;
8002 case TRUTH_XOR_EXPR
:
8003 code
= BIT_XOR_EXPR
;
8009 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type
))
8010 || (GET_MODE_PRECISION (TYPE_MODE (type
))
8011 == TYPE_PRECISION (type
)));
8016 /* If this is a fixed-point operation, then we cannot use the code
8017 below because "expand_shift" doesn't support sat/no-sat fixed-point
8019 if (ALL_FIXED_POINT_MODE_P (mode
))
8022 if (! safe_from_p (subtarget
, treeop1
, 1))
8024 if (modifier
== EXPAND_STACK_PARM
)
8026 op0
= expand_expr (treeop0
, subtarget
,
8027 VOIDmode
, EXPAND_NORMAL
);
8028 temp
= expand_shift (code
, mode
, op0
, treeop1
, target
,
8030 if (code
== LSHIFT_EXPR
)
8031 temp
= REDUCE_BIT_FIELD (temp
);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (ops,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode);
      if (temp)
	return temp;

      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */

      if ((target == 0
	   || modifier == EXPAND_STACK_PARM
	   || ! safe_from_p (target, treeop0, 1)
	   || ! safe_from_p (target, treeop1, 1)
	   /* Make sure we don't have a hard reg (such as function's return
	      value) live across basic blocks, if not optimizing.  */
	   || (!optimize && REG_P (target)
	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);

      emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return target;
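      /* For example, if do_store_flag cannot emit a set-flag insn for
	 "t = (x < y)", the sequence emitted above is:

	     target = 0;
	     if (!(x < y)) goto L;
	     target = 1;
	   L:
      */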
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (treeop0, target,
			 VOIDmode, EXPAND_NORMAL);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;
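      /* XOR with 1 is logical negation for a 0/1 operand:
	 0 ^ 1 == 1 and 1 ^ 1 == 0, so no comparison is needed.  */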
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (type));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (treeop0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
      {
	expand_operands (treeop0, treeop1,
			 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			     OPTAB_WIDEN);
	gcc_assert (temp);
	return temp;
      }

    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      {
	expand_operands (treeop0, treeop1,
			 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			     OPTAB_WIDEN);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (ops, target);
	return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	/* The signedness is determined from input operand.  */
	this_optab = optab_for_tree_code (code,
					  TREE_TYPE (treeop0),
					  optab_default);
	temp = expand_widen_pattern_expr
	  (ops, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
					    target, unsignedp);
	gcc_assert (target);
	return target;
      }

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    default:
      gcc_unreachable ();
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
		   subtarget, &op0, &op1, EXPAND_NORMAL);
  this_optab = optab_for_tree_code (code, type, optab_default);
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
	default:
	case 3: treeop2 = TREE_OPERAND (exp, 2);
	case 2: treeop1 = TREE_OPERAND (exp, 1);
	case 1: treeop0 = TREE_OPERAND (exp, 0);
	case 0: break;
      }
  ops.code = code;
  ops.type = type;
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = loc;

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
		      && TREE_CODE (type) == INTEGER_TYPE
		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == COMPONENT_REF || code == INDIRECT_REF)
	return expand_expr (treeop0, const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }
    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
	 out-of-ssa.  So fake instructions as if this was an access to the
	 base variable.  This unnecessarily allocates a pseudo, see how we can
	 reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
	return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
				   NULL);

      g = get_gimple_for_ssa_name (exp);
      if (g)
	return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
				 modifier, NULL);

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);
      decl_rtl = copy_rtx (decl_rtl);
      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
	  && REG_P (decl_rtl)
	  && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
	{
	  int i = REGNO (decl_rtl);
	  int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
	  while (nregs)
	    {
	      SET_HARD_REG_BIT (crtl->asm_clobbers, i);
	      i++;
	      nregs--;
	    }
	}

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (!context
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  || DECL_EXTERNAL (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
	 address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && !memory_address_addr_space_p (DECL_MODE (exp),
					       XEXP (decl_rtl, 0),
					       MEM_ADDR_SPACE (decl_rtl)))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */
      if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
	{
	  enum machine_mode pmode;

	  /* Get the signedness to be used for this variable.  Ensure we get
	     the same mode we got when the variable was declared.  */
	  if (code == SSA_NAME
	      && (g = SSA_NAME_DEF_STMT (ssa_name))
	      && gimple_code (g) == GIMPLE_CALL)
	    pmode = promote_function_mode (type, mode, &unsignedp,
					   TREE_TYPE
					   (TREE_TYPE (gimple_call_fn (g))),
					   2);
	  else
	    pmode = promote_decl_mode (exp, &unsignedp);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);
      return temp;

    case VECTOR_CST:
      {
	tree tmp = NULL_TREE;
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
	  return const_vector_from_tree (exp);
	if (GET_MODE_CLASS (mode) == MODE_INT)
	  {
	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
	    if (type_for_mode)
	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
	  }
	if (!tmp)
	  tmp = build_constructor_from_list (type,
					     TREE_VECTOR_CST_ELTS (exp));
	return expand_expr (tmp, ignore ? const0_rtx : target,
			    tmode, modifier);
      }

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case FIXED_CST:
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
					    MEM_ADDR_SPACE (temp)))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
    case SAVE_EXPR:
      {
	tree val = treeop0;
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (EXPR_LOCATION (exp),
			      VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    treeop0 = val;
	    TREE_OPERAND (exp, 0) = treeop0;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

	  return const0_rtx;
	}

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
	addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
	struct mem_address addr;
	enum insn_code icode;
	int align;

	get_address_description (exp, &addr);
	op0 = addr_for_mem_ref (&addr, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
		     get_object_alignment (exp, BIGGEST_ALIGNMENT));
	if (mode != BLKmode
	    && (unsigned) align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    rtx reg, insn;

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail.  */
	    reg = gen_reg_rtx (mode);

	    /* Nor can the insn generator.  */
	    insn = GEN_FCN (icode) (reg, temp);
	    gcc_assert (insn != NULL_RTX);
	    emit_insn (insn);

	    return reg;
	  }
	return temp;
      }
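      /* For instance, on a hypothetical target whose vector loads trap
	 unless naturally aligned, a TARGET_MEM_REF with align smaller
	 than GET_MODE_ALIGNMENT (mode) is expanded through the
	 movmisalign pattern above into an explicit unaligned-load
	 instruction writing a fresh pseudo.  */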
    case MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	enum machine_mode address_mode;
	tree base = TREE_OPERAND (exp, 0);
	gimple def_stmt;
	enum insn_code icode;
	int align;

	/* Handle expansion of non-aliased memory with non-BLKmode.  That
	   might end up in a register.  */
	if (TREE_CODE (base) == ADDR_EXPR)
	  {
	    HOST_WIDE_INT offset = mem_ref_offset (exp).low;
	    tree bit_offset;
	    tree bftype;

	    base = TREE_OPERAND (base, 0);
	    if (!DECL_P (base))
	      {
		HOST_WIDE_INT off;
		base = get_addr_base_and_unit_offset (base, &off);
		gcc_assert (base);
		offset += off;
	      }
	    /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	       decl we must use bitfield operations.  */
	    if (DECL_P (base)
		&& !TREE_ADDRESSABLE (base)
		&& DECL_MODE (base) != BLKmode
		&& DECL_RTL_SET_P (base)
		&& !MEM_P (DECL_RTL (base)))
	      {
		if (offset == 0
		    && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
		    && (GET_MODE_BITSIZE (DECL_MODE (base))
			== TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
		  return expand_expr (build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (exp), base),
				      target, tmode, modifier);
		bit_offset = bitsize_int (offset * BITS_PER_UNIT);
		bftype = TREE_TYPE (base);
		if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
		  bftype = TREE_TYPE (exp);
		return expand_expr (build3 (BIT_FIELD_REF, bftype,
					    base,
					    TYPE_SIZE (TREE_TYPE (exp)),
					    bit_offset),
				    target, tmode, modifier);
	      }
	  }
	address_mode = targetm.addr_space.address_mode (as);
	base = TREE_OPERAND (exp, 0);
	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
	  {
	    tree mask = gimple_assign_rhs2 (def_stmt);
	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
			   gimple_assign_rhs1 (def_stmt), mask);
	    TREE_OPERAND (exp, 0) = base;
	  }
	align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
		     get_object_alignment (exp, BIGGEST_ALIGNMENT));
	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	op0 = convert_memory_address_addr_space (address_mode, op0, as);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  {
	    rtx off
	      = immed_double_int_const (mem_ref_offset (exp), address_mode);
	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
	  }
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	if (TREE_THIS_VOLATILE (exp))
	  MEM_VOLATILE_P (temp) = 1;
	if (mode != BLKmode
	    && (unsigned) align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    rtx reg, insn;

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail.  */
	    reg = gen_reg_rtx (mode);

	    /* Nor can the insn generator.  */
	    insn = GEN_FCN (icode) (reg, temp);
	    emit_insn (insn);

	    return reg;
	  }
	return temp;
      }
    case ARRAY_REF:

      {
	tree array = treeop0;
	tree index = treeop1;

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	  {
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
		  break;
		}
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
		 && const_value_known_p (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    unsigned HOST_WIDE_INT ix;
		    tree field, value;

		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					      field, value)
		      if (tree_int_cst_equal (field, index))
			{
			  if (TREE_SIDE_EFFECTS (value))
			    break;

			  if (TREE_CODE (value) == CONSTRUCTOR)
			    {
			      /* If VALUE is a CONSTRUCTOR, this
				 optimization is only useful if
				 this doesn't store the CONSTRUCTOR
				 into memory.  If it does, it is more
				 efficient to just load the data from
				 the array directly.  */
			      rtx ret = expand_constructor (value, target,
							    modifier, true);
			      if (ret == NULL_RTX)
				break;
			    }

			  return expand_expr (fold (value), target, tmode,
					      modifier);
			}
		  }
		else if (TREE_CODE (init) == STRING_CST)
		  {
		    tree index1 = index;
		    tree low_bound = array_ref_low_bound (exp);
		    index1 = fold_convert_loc (loc, sizetype, index1);

		    /* Optimize the special case of a zero lower bound.

		       We convert the lower bound to sizetype to avoid problems
		       with constant folding.  E.g. suppose the lower bound is
		       1 and its mode is QI.  Without the conversion,
		       (ARRAY + (INDEX - (unsigned char) 1)) becomes
		       ((ARRAY + (-(unsigned char) 1)) + INDEX), which becomes
		       (ARRAY + 255 + INDEX).  Oops!  */
		    if (! integer_zerop (low_bound))
		      index1 = size_diffop_loc (loc, index1,
						fold_convert_loc (loc, sizetype,
								  low_bound));

		    if (0 > compare_tree_int (index1,
					      TREE_STRING_LENGTH (init)))
		      {
			tree type = TREE_TYPE (TREE_TYPE (init));
			enum machine_mode mode = TYPE_MODE (type);

			if (GET_MODE_CLASS (mode) == MODE_INT
			    && GET_MODE_SIZE (mode) == 1)
			  return gen_int_mode (TREE_STRING_POINTER (init)
					       [TREE_INT_CST_LOW (index1)],
					       mode);
		      }
		  }
	      }
	  }
      }
      goto normal_inner_ref;
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
				    idx, field, value)
	    if (field == treeop1
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (field))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		  {
		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_cst (NULL_TREE,
					   GET_MODE_BITSIZE (imode) - bitsize);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
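      /* Worked example: for a 3-bit unsigned bit-field, bitsize == 3 and
	 the mask built above is ((HOST_WIDE_INT) 1 << 3) - 1 == 7.  In the
	 signed case with a 32-bit imode, count == 29, and the left/right
	 shift pair sign-extends the 3-bit value into the full word.  */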
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1, mode2;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0, must_force_mem;
	bool packedp = false;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep, true);
	rtx orig_op0, memloc;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
	    || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
		&& DECL_PACKED (TREE_OPERAND (exp, 1))))
	  packedp = true;

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);

	/* If the bitfield is volatile, we want to access it in the
	   field's mode, not the computed mode.  */
	if (volatilep
	    && GET_CODE (op0) == MEM
	    && flag_strict_volatile_bitfields > 0)
	  op0 = adjust_address (op0, mode1, 0);

	mode2
	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);

	/* If we have either an offset, a BLKmode result, or a reference
	   outside the underlying object, we must force it to memory.
	   Such a case can occur in Ada if we have unchecked conversion
	   of an expression from a scalar type to an aggregate type or
	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
	   passed a partially uninitialized object or a view-conversion
	   to a larger size.  */
	must_force_mem = (offset
			  || mode1 == BLKmode
			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));

	/* Handle CONCAT first.  */
	if (GET_CODE (op0) == CONCAT && !must_force_mem)
	  {
	    if (bitpos == 0
		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
	      return op0;
	    if (bitpos == 0
		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		&& bitsize)
	      {
		op0 = XEXP (op0, 0);
		mode2 = GET_MODE (op0);
	      }
	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
		     && bitpos
		     && bitsize)
	      {
		op0 = XEXP (op0, 1);
		bitpos = 0;
		mode2 = GET_MODE (op0);
	      }
	    else
	      /* Otherwise force into memory.  */
	      must_force_mem = 1;
	  }

	/* If this is a constant, put it in a register if it is a legitimate
	   constant and we don't need a memory reference.  */
	if (CONSTANT_P (op0)
	    && mode2 != BLKmode
	    && LEGITIMATE_CONSTANT_P (op0)
	    && !must_force_mem)
	  op0 = force_reg (mode2, op0);

	/* Otherwise, if this is a constant, try to force it to the constant
	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
	   is a legitimate constant.  */
	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
	  op0 = validize_mem (memloc);

	/* Otherwise, if this is a constant or the object is not in memory
	   and need be, put it there.  */
	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
	  {
	    tree nt = build_qualified_type (TREE_TYPE (tem),
					    (TYPE_QUALS (TREE_TYPE (tem))
					     | TYPE_QUAL_CONST));
	    memloc = assign_temp (nt, 1, 1, 1);
	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	  }

	if (offset)
	  {
	    enum machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

	    address_mode
	      = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
	    if (GET_MODE (offset_rtx) != address_mode)
	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

	    if (GET_MODE (op0) == BLKmode
		/* A constant address in OP0 can have VOIDmode, we must
		   not try to call force_reg in that case.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field is volatile, we always want an aligned
	       access.  */
	    || (volatilep && flag_strict_volatile_bitfields > 0)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  It's tempting to make
	       this a constant type, since we know it's only being stored once,
	       but that can cause problems if we are taking the address of this
	       COMPONENT_REF because the MEM of any reference via that address
	       will have flags corresponding to the type, which will not
	       necessarily be constant.  */
	    if (mode == BLKmode)
	      {
		HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
		rtx new_rtx;

		/* If the reference doesn't use the alias set of its type,
		   we cannot create the temporary using that type.  */
		if (component_uses_parent_alias_set (exp))
		  {
		    new_rtx = assign_stack_local (ext_mode, size, 0);
		    set_mem_alias_set (new_rtx, get_alias_set (exp));
		  }
		else
		  new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);

		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  enum machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp;
	  int volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos,
				   &offset, &mode1, &unsignedp, &volatilep,
				   true);
	  rtx orig_op0;

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr (tem,
			       (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				    != INTEGER_CST)
				&& modifier != EXPAND_STACK_PARM
				? target : NULL_RTX),
			       VOIDmode,
			       (modifier == EXPAND_INITIALIZER
				|| modifier == EXPAND_CONST_ADDRESS
				|| modifier == EXPAND_STACK_PARM)
			       ? modifier : EXPAND_NORMAL);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}

      if (!op0)
	op0 = expand_expr (treeop0,
			   NULL_RTX, VOIDmode, modifier);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is
	 such that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (STRICT_ALIGNMENT
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      tree inner_type = TREE_TYPE (treeop0);
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
	      rtx new_rtx
		= assign_stack_temp_for_type (mode, temp_size, 0, type);
	      rtx new_with_op0_mode
		= adjust_address (new_rtx, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (mode)),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new_rtx;
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */

      /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
	 are occasionally created by folding during expansion.  */
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, treeop0, 1)
	      || ! safe_from_p (target, treeop1, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;

    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (type));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (treeop1) != void_type_node
		  && TREE_TYPE (treeop2) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, treeop0, 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (treeop0, op0, -1);
      store_expr (treeop1, temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (treeop2, temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;

    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label, -1);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       MOVE_NONTEMPORAL (exp));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
	return const0_rtx;
      }
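      /* For example, with one-bit bit-fields a and b, "s.a |= s.b" is
	 emitted as:

	     if (!s.b) goto L;
	     s.a = 1;
	   L:

	 i.e. a test and a conditional store rather than a
	 read-modify-write of the destination bit-field.  */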
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);

    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case CASE_LABEL_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl);
    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case DOT_PROD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (&ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }
    case COMPOUND_LITERAL_EXPR:
      {
	/* Initialize the anonymous variable declared in the compound
	   literal, then return the variable.  */
	tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);

	/* Create RTL for this variable.  */
	if (!DECL_RTL_SET_P (decl))
	  {
	    if (DECL_HARD_REGISTER (decl))
	      /* The user specified an assembler name for this variable.
		 Set that up now.  */
	      rest_of_decl_compilation (decl, 0, 0);
	    else
	      expand_decl (decl);
	  }

	return expand_expr_real (decl, original_target, tmode,
				 modifier, alt_rtl);
      }

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask = immed_double_int_const (double_int_mask (prec),
					 GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
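/* Worked example for reduce_to_bit_field_precision: reducing an SImode
   value to a 3-bit type uses the mask 7 in the unsigned case; in the
   signed case count == 32 - 3 == 29, and "x << 29 >> 29" (the right
   shift being arithmetic, since the last expand_shift argument is 0)
   sign-extends from bit 2.  */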
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
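/* For example, is_aligning_offset matches offsets of the shape
   (-(int) &exp) & (2**n - 1): a NEGATE_EXPR of EXP's address, possibly
   wrapped in conversions, under a BIT_AND_EXPR whose constant mask is
   one less than a power of two and larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */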
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (!const_value_known_p (array)
	  || !DECL_INITIAL (array)
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
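/* Usage example: for ARG == &"hello"[1] (an ADDR_EXPR of an ARRAY_REF),
   string_constant returns the STRING_CST "hello" and sets *PTR_OFFSET
   to (sizetype) 1; the builtin expanders rely on this to evaluate
   calls such as strlen on constant strings at compile time.  */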
/* Generate code to calculate OPS, an exploded comparison expression,
   using a store-flag instruction, and return an rtx for the result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (loc,
						code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp, 1);
}
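/* Worked example: "(x & 8) != 0" tests a single (power-of-two) bit, so
   the fold_single_bit_test path above produces the equivalent of
   "(x >> 3) & 1" instead of requiring a set-condition-code insn.  */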
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
	    rtx fallback_label ATTRIBUTE_UNUSED)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, !default_label
			      ? fallback_label : default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label);
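
  /* As a worked example (an added note): for "case 3 ... 7" the index
     has already had 3 subtracted, so the single unsigned test
     "index - 3 > 4" rejects both index < 3 (which wraps around to a
     huge unsigned value) and index > 7.  */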
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
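
  /* For example (an added illustration): with 4-byte table entries this
     computes the address table_label + index * 4, from which the target
     label is loaded below.  */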
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
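/* For example (an added note): a V4SImode constant {1, 2, 3, 4} becomes
   a CONST_VECTOR whose four elements are CONST_INTs, while an all-zero
   initializer is returned directly as CONST0_RTX for the vector mode.  */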
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
                                                   inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
/* Build a decl for a personality function given a language prefix.  */
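/* For example (an added note): the prefix "gxx" yields
   "__gxx_personality_v0" for DWARF2/target unwinding and
   "__gxx_personality_sj0" for SJLJ unwinding.  */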
tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm.except_unwind_info ())
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
#include "gt-expr.h"