1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
31 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
43 #include "typeclass.h"
45 #include "langhooks.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
52 #include "common/common-target.h"
55 #include "diagnostic.h"
56 #include "ssaexpand.h"
57 #include "target-globals.h"
60 /* Decide whether a function's arguments should be processed
61 from first to last or from last to first.
63 They should if the stack and args grow in opposite directions, but
64 only if we have push insns. */
68 #ifndef PUSH_ARGS_REVERSED
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
80 #define STACK_PUSH_CODE PRE_INC
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
93 /* This structure is used by move_by_pieces to describe the move to
95 struct move_by_pieces_d
104 int explicit_inc_from
;
105 unsigned HOST_WIDE_INT len
;
106 HOST_WIDE_INT offset
;
110 /* This structure is used by store_by_pieces to describe the clear to
113 struct store_by_pieces_d
119 unsigned HOST_WIDE_INT len
;
120 HOST_WIDE_INT offset
;
121 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
);
126 static void move_by_pieces_1 (rtx (*) (rtx
, ...), enum machine_mode
,
127 struct move_by_pieces_d
*);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx
, rtx
, rtx
, unsigned, unsigned, HOST_WIDE_INT
);
130 static tree
emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
132 static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT
, enum machine_mode
);
133 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces_d
*, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx
, ...), enum machine_mode
,
136 struct store_by_pieces_d
*);
137 static tree
clear_storage_libcall_fn (int);
138 static rtx
compress_float_constant (rtx
, rtx
);
139 static rtx
get_subtarget (rtx
);
140 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
141 HOST_WIDE_INT
, enum machine_mode
,
142 tree
, tree
, int, alias_set_type
);
143 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
);
144 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
,
145 unsigned HOST_WIDE_INT
, unsigned HOST_WIDE_INT
,
147 tree
, tree
, alias_set_type
, bool);
149 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree
, const_tree
);
151 static int is_aligning_offset (const_tree
, const_tree
);
152 static void expand_operands (tree
, tree
, rtx
, rtx
*, rtx
*,
153 enum expand_modifier
);
154 static rtx
reduce_to_bit_field_precision (rtx
, rtx
, tree
);
155 static rtx
do_store_flag (sepops
, rtx
, enum machine_mode
);
157 static void emit_single_push_insn (enum machine_mode
, rtx
, tree
);
159 static void do_tablejump (rtx
, enum machine_mode
, rtx
, rtx
, rtx
);
160 static rtx
const_vector_from_tree (tree
);
161 static void write_complex_part (rtx
, rtx
, bool);
163 /* This macro is used to determine whether move_by_pieces should be called
164 to perform a structure copy. */
165 #ifndef MOVE_BY_PIECES_P
166 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
167 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
168 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
171 /* This macro is used to determine whether clear_by_pieces should be
172 called to clear storage. */
173 #ifndef CLEAR_BY_PIECES_P
174 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
175 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
176 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
179 /* This macro is used to determine whether store_by_pieces should be
180 called to "memset" storage with byte values other than zero. */
181 #ifndef SET_BY_PIECES_P
182 #define SET_BY_PIECES_P(SIZE, ALIGN) \
183 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
184 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
187 /* This macro is used to determine whether store_by_pieces should be
188 called to "memcpy" storage when the source is a constant string. */
189 #ifndef STORE_BY_PIECES_P
190 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
192 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
195 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
201 /* This is run to set up which modes can be used
202 directly in memory and to initialize the block move optab. It is run
203 at the beginning of compilation and when the target is reinitialized. */
206 init_expr_target (void)
209 enum machine_mode mode
;
214 /* Try indexing by frame ptr and try by stack ptr.
215 It is known that on the Convex the stack ptr isn't a valid index.
216 With luck, one or the other is valid on any machine. */
217 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
218 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
220 /* A scratch register we can modify in-place below to avoid
221 useless RTL allocations. */
222 reg
= gen_rtx_REG (VOIDmode
, -1);
224 insn
= rtx_alloc (INSN
);
225 pat
= gen_rtx_SET (VOIDmode
, NULL_RTX
, NULL_RTX
);
226 PATTERN (insn
) = pat
;
228 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
229 mode
= (enum machine_mode
) ((int) mode
+ 1))
233 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
234 PUT_MODE (mem
, mode
);
235 PUT_MODE (mem1
, mode
);
236 PUT_MODE (reg
, mode
);
238 /* See if there is some register that can be used in this mode and
239 directly loaded or stored from memory. */
241 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
242 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
243 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
246 if (! HARD_REGNO_MODE_OK (regno
, mode
))
249 SET_REGNO (reg
, regno
);
252 SET_DEST (pat
) = reg
;
253 if (recog (pat
, insn
, &num_clobbers
) >= 0)
254 direct_load
[(int) mode
] = 1;
256 SET_SRC (pat
) = mem1
;
257 SET_DEST (pat
) = reg
;
258 if (recog (pat
, insn
, &num_clobbers
) >= 0)
259 direct_load
[(int) mode
] = 1;
262 SET_DEST (pat
) = mem
;
263 if (recog (pat
, insn
, &num_clobbers
) >= 0)
264 direct_store
[(int) mode
] = 1;
267 SET_DEST (pat
) = mem1
;
268 if (recog (pat
, insn
, &num_clobbers
) >= 0)
269 direct_store
[(int) mode
] = 1;
273 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
275 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
276 mode
= GET_MODE_WIDER_MODE (mode
))
278 enum machine_mode srcmode
;
279 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
280 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
284 ic
= can_extend_p (mode
, srcmode
, 0);
285 if (ic
== CODE_FOR_nothing
)
288 PUT_MODE (mem
, srcmode
);
290 if (insn_operand_matches (ic
, 1, mem
))
291 float_extend_from_mem
[mode
][srcmode
] = true;
296 /* This is run at the start of compiling a function. */
301 memset (&crtl
->expr
, 0, sizeof (crtl
->expr
));
304 /* Copy data from FROM to TO, where the machine modes are not the same.
305 Both modes may be integer, or both may be floating, or both may be
307 UNSIGNEDP should be nonzero if FROM is an unsigned type.
308 This causes zero-extension instead of sign-extension. */
311 convert_move (rtx to
, rtx from
, int unsignedp
)
313 enum machine_mode to_mode
= GET_MODE (to
);
314 enum machine_mode from_mode
= GET_MODE (from
);
315 int to_real
= SCALAR_FLOAT_MODE_P (to_mode
);
316 int from_real
= SCALAR_FLOAT_MODE_P (from_mode
);
320 /* rtx code for making an equivalent value. */
321 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
322 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
325 gcc_assert (to_real
== from_real
);
326 gcc_assert (to_mode
!= BLKmode
);
327 gcc_assert (from_mode
!= BLKmode
);
329 /* If the source and destination are already the same, then there's
334 /* If FROM is a SUBREG that indicates that we have already done at least
335 the required extension, strip it. We don't handle such SUBREGs as
338 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
339 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from
)))
340 >= GET_MODE_PRECISION (to_mode
))
341 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
342 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
344 gcc_assert (GET_CODE (to
) != SUBREG
|| !SUBREG_PROMOTED_VAR_P (to
));
346 if (to_mode
== from_mode
347 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
349 emit_move_insn (to
, from
);
353 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
355 gcc_assert (GET_MODE_BITSIZE (from_mode
) == GET_MODE_BITSIZE (to_mode
));
357 if (VECTOR_MODE_P (to_mode
))
358 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
360 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
362 emit_move_insn (to
, from
);
366 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
368 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
369 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
378 gcc_assert ((GET_MODE_PRECISION (from_mode
)
379 != GET_MODE_PRECISION (to_mode
))
380 || (DECIMAL_FLOAT_MODE_P (from_mode
)
381 != DECIMAL_FLOAT_MODE_P (to_mode
)));
383 if (GET_MODE_PRECISION (from_mode
) == GET_MODE_PRECISION (to_mode
))
384 /* Conversion between decimal float and binary float, same size. */
385 tab
= DECIMAL_FLOAT_MODE_P (from_mode
) ? trunc_optab
: sext_optab
;
386 else if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
391 /* Try converting directly if the insn is supported. */
393 code
= convert_optab_handler (tab
, to_mode
, from_mode
);
394 if (code
!= CODE_FOR_nothing
)
396 emit_unop_insn (code
, to
, from
,
397 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
401 /* Otherwise use a libcall. */
402 libcall
= convert_optab_libfunc (tab
, to_mode
, from_mode
);
404 /* Is this conversion implemented yet? */
405 gcc_assert (libcall
);
408 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
410 insns
= get_insns ();
412 emit_libcall_block (insns
, to
, value
,
413 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
415 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
419 /* Handle pointer conversion. */ /* SPEE 900220. */
420 /* Targets are expected to provide conversion insns between PxImode and
421 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
422 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
424 enum machine_mode full_mode
425 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
427 gcc_assert (convert_optab_handler (trunc_optab
, to_mode
, full_mode
)
428 != CODE_FOR_nothing
);
430 if (full_mode
!= from_mode
)
431 from
= convert_to_mode (full_mode
, from
, unsignedp
);
432 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, full_mode
),
436 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
439 enum machine_mode full_mode
440 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
441 convert_optab ctab
= unsignedp
? zext_optab
: sext_optab
;
442 enum insn_code icode
;
444 icode
= convert_optab_handler (ctab
, full_mode
, from_mode
);
445 gcc_assert (icode
!= CODE_FOR_nothing
);
447 if (to_mode
== full_mode
)
449 emit_unop_insn (icode
, to
, from
, UNKNOWN
);
453 new_from
= gen_reg_rtx (full_mode
);
454 emit_unop_insn (icode
, new_from
, from
, UNKNOWN
);
456 /* else proceed to integer conversions below. */
457 from_mode
= full_mode
;
461 /* Make sure both are fixed-point modes or both are not. */
462 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
) ==
463 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode
));
464 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
))
466 /* If we widen from_mode to to_mode and they are in the same class,
467 we won't saturate the result.
468 Otherwise, always saturate the result to play safe. */
469 if (GET_MODE_CLASS (from_mode
) == GET_MODE_CLASS (to_mode
)
470 && GET_MODE_SIZE (from_mode
) < GET_MODE_SIZE (to_mode
))
471 expand_fixed_convert (to
, from
, 0, 0);
473 expand_fixed_convert (to
, from
, 0, 1);
477 /* Now both modes are integers. */
479 /* Handle expanding beyond a word. */
480 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
)
481 && GET_MODE_PRECISION (to_mode
) > BITS_PER_WORD
)
488 enum machine_mode lowpart_mode
;
489 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
491 /* Try converting directly if the insn is supported. */
492 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
495 /* If FROM is a SUBREG, put it into a register. Do this
496 so that we always generate the same set of insns for
497 better cse'ing; if an intermediate assignment occurred,
498 we won't be doing the operation directly on the SUBREG. */
499 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
500 from
= force_reg (from_mode
, from
);
501 emit_unop_insn (code
, to
, from
, equiv_code
);
504 /* Next, try converting via full word. */
505 else if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
506 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
507 != CODE_FOR_nothing
))
509 rtx word_to
= gen_reg_rtx (word_mode
);
512 if (reg_overlap_mentioned_p (to
, from
))
513 from
= force_reg (from_mode
, from
);
516 convert_move (word_to
, from
, unsignedp
);
517 emit_unop_insn (code
, to
, word_to
, equiv_code
);
521 /* No special multiword conversion insn; do it by hand. */
524 /* Since we will turn this into a no conflict block, we must ensure
525 that the source does not overlap the target. */
527 if (reg_overlap_mentioned_p (to
, from
))
528 from
= force_reg (from_mode
, from
);
530 /* Get a copy of FROM widened to a word, if necessary. */
531 if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
)
532 lowpart_mode
= word_mode
;
534 lowpart_mode
= from_mode
;
536 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
538 lowpart
= gen_lowpart (lowpart_mode
, to
);
539 emit_move_insn (lowpart
, lowfrom
);
541 /* Compute the value to put in each remaining word. */
543 fill_value
= const0_rtx
;
545 fill_value
= emit_store_flag (gen_reg_rtx (word_mode
),
546 LT
, lowfrom
, const0_rtx
,
549 /* Fill the remaining words. */
550 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
552 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
553 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
555 gcc_assert (subword
);
557 if (fill_value
!= subword
)
558 emit_move_insn (subword
, fill_value
);
561 insns
= get_insns ();
568 /* Truncating multi-word to a word or less. */
569 if (GET_MODE_PRECISION (from_mode
) > BITS_PER_WORD
570 && GET_MODE_PRECISION (to_mode
) <= BITS_PER_WORD
)
573 && ! MEM_VOLATILE_P (from
)
574 && direct_load
[(int) to_mode
]
575 && ! mode_dependent_address_p (XEXP (from
, 0)))
577 || GET_CODE (from
) == SUBREG
))
578 from
= force_reg (from_mode
, from
);
579 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
583 /* Now follow all the conversions between integers
584 no more than a word long. */
586 /* For truncation, usually we can just refer to FROM in a narrower mode. */
587 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
588 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, from_mode
))
591 && ! MEM_VOLATILE_P (from
)
592 && direct_load
[(int) to_mode
]
593 && ! mode_dependent_address_p (XEXP (from
, 0)))
595 || GET_CODE (from
) == SUBREG
))
596 from
= force_reg (from_mode
, from
);
597 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
598 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
599 from
= copy_to_reg (from
);
600 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
604 /* Handle extension. */
605 if (GET_MODE_PRECISION (to_mode
) > GET_MODE_PRECISION (from_mode
))
607 /* Convert directly if that works. */
608 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
611 emit_unop_insn (code
, to
, from
, equiv_code
);
616 enum machine_mode intermediate
;
620 /* Search for a mode to convert via. */
621 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
622 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
623 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
625 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
626 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, intermediate
)))
627 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
628 != CODE_FOR_nothing
))
630 convert_move (to
, convert_to_mode (intermediate
, from
,
631 unsignedp
), unsignedp
);
635 /* No suitable intermediate mode.
636 Generate what we need with shifts. */
637 shift_amount
= (GET_MODE_PRECISION (to_mode
)
638 - GET_MODE_PRECISION (from_mode
));
639 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
640 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
642 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
645 emit_move_insn (to
, tmp
);
650 /* Support special truncate insns for certain modes. */
651 if (convert_optab_handler (trunc_optab
, to_mode
,
652 from_mode
) != CODE_FOR_nothing
)
654 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, from_mode
),
659 /* Handle truncation of volatile memrefs, and so on;
660 the things that couldn't be truncated directly,
661 and for which there was no special instruction.
663 ??? Code above formerly short-circuited this, for most integer
664 mode pairs, with a force_reg in from_mode followed by a recursive
665 call to this routine. Appears always to have been wrong. */
666 if (GET_MODE_PRECISION (to_mode
) < GET_MODE_PRECISION (from_mode
))
668 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
669 emit_move_insn (to
, temp
);
673 /* Mode combination is not recognized. */
677 /* Return an rtx for a value that would result
678 from converting X to mode MODE.
679 Both X and MODE may be floating, or both integer.
680 UNSIGNEDP is nonzero if X is an unsigned value.
681 This can be done by referring to a part of X in place
682 or by copying to a new temporary with conversion. */
685 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
687 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
690 /* Return an rtx for a value that would result
691 from converting X from mode OLDMODE to mode MODE.
692 Both modes may be floating, or both integer.
693 UNSIGNEDP is nonzero if X is an unsigned value.
695 This can be done by referring to a part of X in place
696 or by copying to a new temporary with conversion.
698 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
701 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
705 /* If FROM is a SUBREG that indicates that we have already done at least
706 the required extension, strip it. */
708 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
709 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
710 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
711 x
= gen_lowpart (mode
, x
);
713 if (GET_MODE (x
) != VOIDmode
)
714 oldmode
= GET_MODE (x
);
719 /* There is one case that we must handle specially: If we are converting
720 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
721 we are to interpret the constant as unsigned, gen_lowpart will do
722 the wrong if the constant appears negative. What we want to do is
723 make the high-order word of the constant zero, not all ones. */
725 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
726 && GET_MODE_BITSIZE (mode
) == HOST_BITS_PER_DOUBLE_INT
727 && CONST_INT_P (x
) && INTVAL (x
) < 0)
729 double_int val
= uhwi_to_double_int (INTVAL (x
));
731 /* We need to zero extend VAL. */
732 if (oldmode
!= VOIDmode
)
733 val
= double_int_zext (val
, GET_MODE_BITSIZE (oldmode
));
735 return immed_double_int_const (val
, mode
);
738 /* We can do this with a gen_lowpart if both desired and current modes
739 are integer, and this is either a constant integer, a register, or a
740 non-volatile MEM. Except for the constant case where MODE is no
741 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
744 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
)
745 || (GET_MODE_CLASS (mode
) == MODE_INT
746 && GET_MODE_CLASS (oldmode
) == MODE_INT
747 && (GET_CODE (x
) == CONST_DOUBLE
748 || (GET_MODE_PRECISION (mode
) <= GET_MODE_PRECISION (oldmode
)
749 && ((MEM_P (x
) && ! MEM_VOLATILE_P (x
)
750 && direct_load
[(int) mode
])
752 && (! HARD_REGISTER_P (x
)
753 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
754 && TRULY_NOOP_TRUNCATION_MODES_P (mode
,
757 /* ?? If we don't know OLDMODE, we have to assume here that
758 X does not need sign- or zero-extension. This may not be
759 the case, but it's the best we can do. */
760 if (CONST_INT_P (x
) && oldmode
!= VOIDmode
761 && GET_MODE_PRECISION (mode
) > GET_MODE_PRECISION (oldmode
))
763 HOST_WIDE_INT val
= INTVAL (x
);
765 /* We must sign or zero-extend in this case. Start by
766 zero-extending, then sign extend if we need to. */
767 val
&= GET_MODE_MASK (oldmode
);
769 && val_signbit_known_set_p (oldmode
, val
))
770 val
|= ~GET_MODE_MASK (oldmode
);
772 return gen_int_mode (val
, mode
);
775 return gen_lowpart (mode
, x
);
778 /* Converting from integer constant into mode is always equivalent to an
780 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
782 gcc_assert (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (oldmode
));
783 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
786 temp
= gen_reg_rtx (mode
);
787 convert_move (temp
, x
, unsignedp
);
791 /* Return the largest alignment we can use for doing a move (or store)
792 of MAX_PIECES. ALIGN is the largest alignment we could use. */
795 alignment_for_piecewise_move (unsigned int max_pieces
, unsigned int align
)
797 enum machine_mode tmode
;
799 tmode
= mode_for_size (max_pieces
* BITS_PER_UNIT
, MODE_INT
, 1);
800 if (align
>= GET_MODE_ALIGNMENT (tmode
))
801 align
= GET_MODE_ALIGNMENT (tmode
);
804 enum machine_mode tmode
, xmode
;
806 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
808 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
809 if (GET_MODE_SIZE (tmode
) > max_pieces
810 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
813 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
819 /* Return the widest integer mode no wider than SIZE. If no such mode
820 can be found, return VOIDmode. */
822 static enum machine_mode
823 widest_int_mode_for_size (unsigned int size
)
825 enum machine_mode tmode
, mode
= VOIDmode
;
827 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
828 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
829 if (GET_MODE_SIZE (tmode
) < size
)
835 /* STORE_MAX_PIECES is the number of bytes at a time that we can
836 store efficiently. Due to internal GCC limitations, this is
837 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
838 for an immediate constant. */
840 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
842 /* Determine whether the LEN bytes can be moved by using several move
843 instructions. Return nonzero if a call to move_by_pieces should
847 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
848 unsigned int align ATTRIBUTE_UNUSED
)
850 return MOVE_BY_PIECES_P (len
, align
);
853 /* Generate several move instructions to copy LEN bytes from block FROM to
854 block TO. (These are MEM rtx's with BLKmode).
856 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
857 used to push FROM to the stack.
859 ALIGN is maximum stack alignment we can assume.
861 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
862 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
866 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
867 unsigned int align
, int endp
)
869 struct move_by_pieces_d data
;
870 enum machine_mode to_addr_mode
;
871 enum machine_mode from_addr_mode
= get_address_mode (from
);
872 rtx to_addr
, from_addr
= XEXP (from
, 0);
873 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
874 enum insn_code icode
;
876 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
879 data
.from_addr
= from_addr
;
882 to_addr_mode
= get_address_mode (to
);
883 to_addr
= XEXP (to
, 0);
886 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
887 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
889 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
893 to_addr_mode
= VOIDmode
;
897 #ifdef STACK_GROWS_DOWNWARD
903 data
.to_addr
= to_addr
;
906 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
907 || GET_CODE (from_addr
) == POST_INC
908 || GET_CODE (from_addr
) == POST_DEC
);
910 data
.explicit_inc_from
= 0;
911 data
.explicit_inc_to
= 0;
912 if (data
.reverse
) data
.offset
= len
;
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data
.autinc_from
&& data
.autinc_to
)
919 && move_by_pieces_ninsns (len
, align
, max_size
) > 2)
921 /* Find the mode of the largest move...
922 MODE might not be used depending on the definitions of the
923 USE_* macros below. */
924 enum machine_mode mode ATTRIBUTE_UNUSED
925 = widest_int_mode_for_size (max_size
);
927 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
929 data
.from_addr
= copy_to_mode_reg (from_addr_mode
,
930 plus_constant (from_addr_mode
,
932 data
.autinc_from
= 1;
933 data
.explicit_inc_from
= -1;
935 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
937 data
.from_addr
= copy_to_mode_reg (from_addr_mode
, from_addr
);
938 data
.autinc_from
= 1;
939 data
.explicit_inc_from
= 1;
941 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
942 data
.from_addr
= copy_to_mode_reg (from_addr_mode
, from_addr
);
943 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
945 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
946 plus_constant (to_addr_mode
,
949 data
.explicit_inc_to
= -1;
951 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
953 data
.to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
955 data
.explicit_inc_to
= 1;
957 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
958 data
.to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
961 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
963 /* First move what we can in the largest integer mode, then go to
964 successively smaller modes. */
968 enum machine_mode mode
= widest_int_mode_for_size (max_size
);
970 if (mode
== VOIDmode
)
973 icode
= optab_handler (mov_optab
, mode
);
974 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
975 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
977 max_size
= GET_MODE_SIZE (mode
);
980 /* The code above should have handled everything. */
981 gcc_assert (!data
.len
);
987 gcc_assert (!data
.reverse
);
992 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
993 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
995 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
996 plus_constant (to_addr_mode
,
1000 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1007 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
1015 /* Return number of insns required to move L bytes by pieces.
1016 ALIGN (in bits) is maximum alignment we can assume. */
1018 unsigned HOST_WIDE_INT
1019 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
,
1020 unsigned int max_size
)
1022 unsigned HOST_WIDE_INT n_insns
= 0;
1024 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
1026 while (max_size
> 1)
1028 enum machine_mode mode
;
1029 enum insn_code icode
;
1031 mode
= widest_int_mode_for_size (max_size
);
1033 if (mode
== VOIDmode
)
1036 icode
= optab_handler (mov_optab
, mode
);
1037 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1038 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1040 max_size
= GET_MODE_SIZE (mode
);
1047 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1048 with move instructions for mode MODE. GENFUN is the gen_... function
1049 to make a move insn for that mode. DATA has all the other info. */
1052 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1053 struct move_by_pieces_d
*data
)
1055 unsigned int size
= GET_MODE_SIZE (mode
);
1056 rtx to1
= NULL_RTX
, from1
;
1058 while (data
->len
>= size
)
1061 data
->offset
-= size
;
1065 if (data
->autinc_to
)
1066 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1069 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1072 if (data
->autinc_from
)
1073 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1076 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1078 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1079 emit_insn (gen_add2_insn (data
->to_addr
,
1080 GEN_INT (-(HOST_WIDE_INT
)size
)));
1081 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1082 emit_insn (gen_add2_insn (data
->from_addr
,
1083 GEN_INT (-(HOST_WIDE_INT
)size
)));
1086 emit_insn ((*genfun
) (to1
, from1
));
1089 #ifdef PUSH_ROUNDING
1090 emit_single_push_insn (mode
, from1
, NULL
);
1096 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1097 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1098 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1099 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1101 if (! data
->reverse
)
1102 data
->offset
+= size
;
1108 /* Emit code to move a block Y to a block X. This may be done with
1109 string-move instructions, with multiple scalar move instructions,
1110 or with a library call.
1112 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1113 SIZE is an rtx that says how long they are.
1114 ALIGN is the maximum alignment we can assume they have.
1115 METHOD describes what kind of copy this is, and what mechanisms may be used.
1117 Return the address of the new block, if memcpy is called and returns it,
1121 emit_block_move_hints (rtx x
, rtx y
, rtx size
, enum block_op_methods method
,
1122 unsigned int expected_align
, HOST_WIDE_INT expected_size
)
1129 if (CONST_INT_P (size
)
1130 && INTVAL (size
) == 0)
1135 case BLOCK_OP_NORMAL
:
1136 case BLOCK_OP_TAILCALL
:
1137 may_use_call
= true;
1140 case BLOCK_OP_CALL_PARM
:
1141 may_use_call
= block_move_libcall_safe_for_call_parm ();
1143 /* Make inhibit_defer_pop nonzero around the library call
1144 to force it to pop the arguments right away. */
1148 case BLOCK_OP_NO_LIBCALL
:
1149 may_use_call
= false;
1156 gcc_assert (MEM_P (x
) && MEM_P (y
));
1157 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1158 gcc_assert (align
>= BITS_PER_UNIT
);
1160 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1161 block copy is more efficient for other large modes, e.g. DCmode. */
1162 x
= adjust_address (x
, BLKmode
, 0);
1163 y
= adjust_address (y
, BLKmode
, 0);
1165 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1166 can be incorrect is coming from __builtin_memcpy. */
1167 if (CONST_INT_P (size
))
1169 x
= shallow_copy_rtx (x
);
1170 y
= shallow_copy_rtx (y
);
1171 set_mem_size (x
, INTVAL (size
));
1172 set_mem_size (y
, INTVAL (size
));
1175 if (CONST_INT_P (size
) && MOVE_BY_PIECES_P (INTVAL (size
), align
))
1176 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1177 else if (emit_block_move_via_movmem (x
, y
, size
, align
,
1178 expected_align
, expected_size
))
1180 else if (may_use_call
1181 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1182 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y
)))
1184 /* Since x and y are passed to a libcall, mark the corresponding
1185 tree EXPR as addressable. */
1186 tree y_expr
= MEM_EXPR (y
);
1187 tree x_expr
= MEM_EXPR (x
);
1189 mark_addressable (y_expr
);
1191 mark_addressable (x_expr
);
1192 retval
= emit_block_move_via_libcall (x
, y
, size
,
1193 method
== BLOCK_OP_TAILCALL
);
1197 emit_block_move_via_loop (x
, y
, size
, align
);
1199 if (method
== BLOCK_OP_CALL_PARM
)
1206 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1208 return emit_block_move_hints (x
, y
, size
, method
, 0, -1);
1211 /* A subroutine of emit_block_move. Returns true if calling the
1212 block move libcall will not clobber any parameters which may have
1213 already been placed on the stack. */
1216 block_move_libcall_safe_for_call_parm (void)
1218 #if defined (REG_PARM_STACK_SPACE)
1222 /* If arguments are pushed on the stack, then they're safe. */
1226 /* If registers go on the stack anyway, any argument is sure to clobber
1227 an outgoing argument. */
1228 #if defined (REG_PARM_STACK_SPACE)
1229 fn
= emit_block_move_libcall_fn (false);
1230 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1231 depend on its argument. */
1233 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn
? NULL_TREE
: TREE_TYPE (fn
)))
1234 && REG_PARM_STACK_SPACE (fn
) != 0)
1238 /* If any argument goes in memory, then it might clobber an outgoing
1241 CUMULATIVE_ARGS args_so_far_v
;
1242 cumulative_args_t args_so_far
;
1245 fn
= emit_block_move_libcall_fn (false);
1246 INIT_CUMULATIVE_ARGS (args_so_far_v
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1247 args_so_far
= pack_cumulative_args (&args_so_far_v
);
1249 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1250 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1252 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1253 rtx tmp
= targetm
.calls
.function_arg (args_so_far
, mode
,
1255 if (!tmp
|| !REG_P (tmp
))
1257 if (targetm
.calls
.arg_partial_bytes (args_so_far
, mode
, NULL
, 1))
1259 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
1266 /* A subroutine of emit_block_move. Expand a movmem pattern;
1267 return true if successful. */
1270 emit_block_move_via_movmem (rtx x
, rtx y
, rtx size
, unsigned int align
,
1271 unsigned int expected_align
, HOST_WIDE_INT expected_size
)
1273 int save_volatile_ok
= volatile_ok
;
1274 enum machine_mode mode
;
1276 if (expected_align
< align
)
1277 expected_align
= align
;
1279 /* Since this is a move insn, we don't care about volatility. */
1282 /* Try the most limited insn first, because there's no point
1283 including more than one in the machine description unless
1284 the more limited one has some advantage. */
1286 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1287 mode
= GET_MODE_WIDER_MODE (mode
))
1289 enum insn_code code
= direct_optab_handler (movmem_optab
, mode
);
1291 if (code
!= CODE_FOR_nothing
1292 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1293 here because if SIZE is less than the mode mask, as it is
1294 returned by the macro, it will definitely be less than the
1295 actual mode mask. */
1296 && ((CONST_INT_P (size
)
1297 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1298 <= (GET_MODE_MASK (mode
) >> 1)))
1299 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
))
1301 struct expand_operand ops
[6];
1304 /* ??? When called via emit_block_move_for_call, it'd be
1305 nice if there were some way to inform the backend, so
1306 that it doesn't fail the expansion because it thinks
1307 emitting the libcall would be more efficient. */
1308 nops
= insn_data
[(int) code
].n_generator_args
;
1309 gcc_assert (nops
== 4 || nops
== 6);
1311 create_fixed_operand (&ops
[0], x
);
1312 create_fixed_operand (&ops
[1], y
);
1313 /* The check above guarantees that this size conversion is valid. */
1314 create_convert_operand_to (&ops
[2], size
, mode
, true);
1315 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
1318 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
1319 create_integer_operand (&ops
[5], expected_size
);
1321 if (maybe_expand_insn (code
, nops
, ops
))
1323 volatile_ok
= save_volatile_ok
;
1329 volatile_ok
= save_volatile_ok
;
1333 /* A subroutine of emit_block_move. Expand a call to memcpy.
1334 Return the return value from memcpy, 0 otherwise. */
1337 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
, bool tailcall
)
1339 rtx dst_addr
, src_addr
;
1340 tree call_expr
, fn
, src_tree
, dst_tree
, size_tree
;
1341 enum machine_mode size_mode
;
1344 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1345 pseudos. We can then place those new pseudos into a VAR_DECL and
1348 dst_addr
= copy_addr_to_reg (XEXP (dst
, 0));
1349 src_addr
= copy_addr_to_reg (XEXP (src
, 0));
1351 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1352 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1354 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1355 src_tree
= make_tree (ptr_type_node
, src_addr
);
1357 size_mode
= TYPE_MODE (sizetype
);
1359 size
= convert_to_mode (size_mode
, size
, 1);
1360 size
= copy_to_mode_reg (size_mode
, size
);
1362 /* It is incorrect to use the libcall calling conventions to call
1363 memcpy in this context. This could be a user call to memcpy and
1364 the user may wish to examine the return value from memcpy. For
1365 targets where libcalls and normal calls have different conventions
1366 for returning pointers, we could end up generating incorrect code. */
1368 size_tree
= make_tree (sizetype
, size
);
1370 fn
= emit_block_move_libcall_fn (true);
1371 call_expr
= build_call_expr (fn
, 3, dst_tree
, src_tree
, size_tree
);
1372 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
1374 retval
= expand_normal (call_expr
);
1379 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1380 for the function we use for block copies. */
1382 static GTY(()) tree block_move_fn
;
1385 init_block_move_fn (const char *asmspec
)
1389 tree args
, fn
, attrs
, attr_args
;
1391 fn
= get_identifier ("memcpy");
1392 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1393 const_ptr_type_node
, sizetype
,
1396 fn
= build_decl (UNKNOWN_LOCATION
, FUNCTION_DECL
, fn
, args
);
1397 DECL_EXTERNAL (fn
) = 1;
1398 TREE_PUBLIC (fn
) = 1;
1399 DECL_ARTIFICIAL (fn
) = 1;
1400 TREE_NOTHROW (fn
) = 1;
1401 DECL_VISIBILITY (fn
) = VISIBILITY_DEFAULT
;
1402 DECL_VISIBILITY_SPECIFIED (fn
) = 1;
1404 attr_args
= build_tree_list (NULL_TREE
, build_string (1, "1"));
1405 attrs
= tree_cons (get_identifier ("fn spec"), attr_args
, NULL
);
1407 decl_attributes (&fn
, attrs
, ATTR_FLAG_BUILT_IN
);
1413 set_user_assembler_name (block_move_fn
, asmspec
);
1417 emit_block_move_libcall_fn (int for_call
)
1419 static bool emitted_extern
;
1422 init_block_move_fn (NULL
);
1424 if (for_call
&& !emitted_extern
)
1426 emitted_extern
= true;
1427 make_decl_rtl (block_move_fn
);
1430 return block_move_fn
;
1433 /* A subroutine of emit_block_move. Copy the data via an explicit
1434 loop. This is used only when libcalls are forbidden. */
1435 /* ??? It'd be nice to copy in hunks larger than QImode. */
1438 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1439 unsigned int align ATTRIBUTE_UNUSED
)
1441 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1442 enum machine_mode x_addr_mode
= get_address_mode (x
);
1443 enum machine_mode y_addr_mode
= get_address_mode (y
);
1444 enum machine_mode iter_mode
;
1446 iter_mode
= GET_MODE (size
);
1447 if (iter_mode
== VOIDmode
)
1448 iter_mode
= word_mode
;
1450 top_label
= gen_label_rtx ();
1451 cmp_label
= gen_label_rtx ();
1452 iter
= gen_reg_rtx (iter_mode
);
1454 emit_move_insn (iter
, const0_rtx
);
1456 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1457 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1458 do_pending_stack_adjust ();
1460 emit_jump (cmp_label
);
1461 emit_label (top_label
);
1463 tmp
= convert_modes (x_addr_mode
, iter_mode
, iter
, true);
1464 x_addr
= gen_rtx_PLUS (x_addr_mode
, x_addr
, tmp
);
1466 if (x_addr_mode
!= y_addr_mode
)
1467 tmp
= convert_modes (y_addr_mode
, iter_mode
, iter
, true);
1468 y_addr
= gen_rtx_PLUS (y_addr_mode
, y_addr
, tmp
);
1470 x
= change_address (x
, QImode
, x_addr
);
1471 y
= change_address (y
, QImode
, y_addr
);
1473 emit_move_insn (x
, y
);
1475 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1476 true, OPTAB_LIB_WIDEN
);
1478 emit_move_insn (iter
, tmp
);
1480 emit_label (cmp_label
);
1482 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1486 /* Copy all or part of a value X into registers starting at REGNO.
1487 The number of registers to be filled is NREGS. */
1490 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1493 #ifdef HAVE_load_multiple
1501 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
1502 x
= validize_mem (force_const_mem (mode
, x
));
1504 /* See if the machine can do this with a load multiple insn. */
1505 #ifdef HAVE_load_multiple
1506 if (HAVE_load_multiple
)
1508 last
= get_last_insn ();
1509 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1517 delete_insns_since (last
);
1521 for (i
= 0; i
< nregs
; i
++)
1522 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1523 operand_subword_force (x
, i
, mode
));
1526 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1527 The number of registers to be filled is NREGS. */
1530 move_block_from_reg (int regno
, rtx x
, int nregs
)
1537 /* See if the machine can do this with a store multiple insn. */
1538 #ifdef HAVE_store_multiple
1539 if (HAVE_store_multiple
)
1541 rtx last
= get_last_insn ();
1542 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1550 delete_insns_since (last
);
1554 for (i
= 0; i
< nregs
; i
++)
1556 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1560 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1564 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1565 ORIG, where ORIG is a non-consecutive group of registers represented by
1566 a PARALLEL. The clone is identical to the original except in that the
1567 original set of registers is replaced by a new set of pseudo registers.
1568 The new set has the same modes as the original set. */
1571 gen_group_rtx (rtx orig
)
1576 gcc_assert (GET_CODE (orig
) == PARALLEL
);
1578 length
= XVECLEN (orig
, 0);
1579 tmps
= XALLOCAVEC (rtx
, length
);
1581 /* Skip a NULL entry in first slot. */
1582 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1587 for (; i
< length
; i
++)
1589 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1590 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1592 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1595 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1598 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1599 except that values are placed in TMPS[i], and must later be moved
1600 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1603 emit_group_load_1 (rtx
*tmps
, rtx dst
, rtx orig_src
, tree type
, int ssize
)
1607 enum machine_mode m
= GET_MODE (orig_src
);
1609 gcc_assert (GET_CODE (dst
) == PARALLEL
);
1612 && !SCALAR_INT_MODE_P (m
)
1613 && !MEM_P (orig_src
)
1614 && GET_CODE (orig_src
) != CONCAT
)
1616 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_src
));
1617 if (imode
== BLKmode
)
1618 src
= assign_stack_temp (GET_MODE (orig_src
), ssize
);
1620 src
= gen_reg_rtx (imode
);
1621 if (imode
!= BLKmode
)
1622 src
= gen_lowpart (GET_MODE (orig_src
), src
);
1623 emit_move_insn (src
, orig_src
);
1624 /* ...and back again. */
1625 if (imode
!= BLKmode
)
1626 src
= gen_lowpart (imode
, src
);
1627 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1631 /* Check for a NULL entry, used to indicate that the parameter goes
1632 both on the stack and in registers. */
1633 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1638 /* Process the pieces. */
1639 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1641 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1642 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1643 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1646 /* Handle trailing fragments that run over the size of the struct. */
1647 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1649 /* Arrange to shift the fragment to where it belongs.
1650 extract_bit_field loads to the lsb of the reg. */
1652 #ifdef BLOCK_REG_PADDING
1653 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1654 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1659 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1660 bytelen
= ssize
- bytepos
;
1661 gcc_assert (bytelen
> 0);
1664 /* If we won't be loading directly from memory, protect the real source
1665 from strange tricks we might play; but make sure that the source can
1666 be loaded directly into the destination. */
1668 if (!MEM_P (orig_src
)
1669 && (!CONSTANT_P (orig_src
)
1670 || (GET_MODE (orig_src
) != mode
1671 && GET_MODE (orig_src
) != VOIDmode
)))
1673 if (GET_MODE (orig_src
) == VOIDmode
)
1674 src
= gen_reg_rtx (mode
);
1676 src
= gen_reg_rtx (GET_MODE (orig_src
));
1678 emit_move_insn (src
, orig_src
);
1681 /* Optimize the access just a bit. */
1683 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1684 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1685 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1686 && bytelen
== GET_MODE_SIZE (mode
))
1688 tmps
[i
] = gen_reg_rtx (mode
);
1689 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1691 else if (COMPLEX_MODE_P (mode
)
1692 && GET_MODE (src
) == mode
1693 && bytelen
== GET_MODE_SIZE (mode
))
1694 /* Let emit_move_complex do the bulk of the work. */
1696 else if (GET_CODE (src
) == CONCAT
)
1698 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1699 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1701 if ((bytepos
== 0 && bytelen
== slen0
)
1702 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1704 /* The following assumes that the concatenated objects all
1705 have the same size. In this case, a simple calculation
1706 can be used to determine the object and the bit field
1708 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1709 if (! CONSTANT_P (tmps
[i
])
1710 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
1711 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1712 (bytepos
% slen0
) * BITS_PER_UNIT
,
1713 1, false, NULL_RTX
, mode
, mode
);
1719 gcc_assert (!bytepos
);
1720 mem
= assign_stack_temp (GET_MODE (src
), slen
);
1721 emit_move_insn (mem
, src
);
1722 tmps
[i
] = extract_bit_field (mem
, bytelen
* BITS_PER_UNIT
,
1723 0, 1, false, NULL_RTX
, mode
, mode
);
1726 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1727 SIMD register, which is currently broken. While we get GCC
1728 to emit proper RTL for these cases, let's dump to memory. */
1729 else if (VECTOR_MODE_P (GET_MODE (dst
))
1732 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1735 mem
= assign_stack_temp (GET_MODE (src
), slen
);
1736 emit_move_insn (mem
, src
);
1737 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1739 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1740 && XVECLEN (dst
, 0) > 1)
1741 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE(dst
), bytepos
);
1742 else if (CONSTANT_P (src
))
1744 HOST_WIDE_INT len
= (HOST_WIDE_INT
) bytelen
;
1752 gcc_assert (2 * len
== ssize
);
1753 split_double (src
, &first
, &second
);
1760 else if (REG_P (src
) && GET_MODE (src
) == mode
)
1763 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1764 bytepos
* BITS_PER_UNIT
, 1, false, NULL_RTX
,
1768 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
1773 /* Emit code to move a block SRC of type TYPE to a block DST,
1774 where DST is non-consecutive registers represented by a PARALLEL.
1775 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1779 emit_group_load (rtx dst
, rtx src
, tree type
, int ssize
)
1784 tmps
= XALLOCAVEC (rtx
, XVECLEN (dst
, 0));
1785 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1787 /* Copy the extracted pieces into the proper (probable) hard regs. */
1788 for (i
= 0; i
< XVECLEN (dst
, 0); i
++)
1790 rtx d
= XEXP (XVECEXP (dst
, 0, i
), 0);
1793 emit_move_insn (d
, tmps
[i
]);
1797 /* Similar, but load SRC into new pseudos in a format that looks like
1798 PARALLEL. This can later be fed to emit_group_move to get things
1799 in the right place. */
1802 emit_group_load_into_temps (rtx parallel
, rtx src
, tree type
, int ssize
)
1807 vec
= rtvec_alloc (XVECLEN (parallel
, 0));
1808 emit_group_load_1 (&RTVEC_ELT (vec
, 0), parallel
, src
, type
, ssize
);
1810 /* Convert the vector to look just like the original PARALLEL, except
1811 with the computed values. */
1812 for (i
= 0; i
< XVECLEN (parallel
, 0); i
++)
1814 rtx e
= XVECEXP (parallel
, 0, i
);
1815 rtx d
= XEXP (e
, 0);
1819 d
= force_reg (GET_MODE (d
), RTVEC_ELT (vec
, i
));
1820 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), d
, XEXP (e
, 1));
1822 RTVEC_ELT (vec
, i
) = e
;
1825 return gen_rtx_PARALLEL (GET_MODE (parallel
), vec
);
1828 /* Emit code to move a block SRC to block DST, where SRC and DST are
1829 non-consecutive groups of registers, each represented by a PARALLEL. */
1832 emit_group_move (rtx dst
, rtx src
)
1836 gcc_assert (GET_CODE (src
) == PARALLEL
1837 && GET_CODE (dst
) == PARALLEL
1838 && XVECLEN (src
, 0) == XVECLEN (dst
, 0));
1840 /* Skip first entry if NULL. */
1841 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1842 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1843 XEXP (XVECEXP (src
, 0, i
), 0));
1846 /* Move a group of registers represented by a PARALLEL into pseudos. */
1849 emit_group_move_into_temps (rtx src
)
1851 rtvec vec
= rtvec_alloc (XVECLEN (src
, 0));
1854 for (i
= 0; i
< XVECLEN (src
, 0); i
++)
1856 rtx e
= XVECEXP (src
, 0, i
);
1857 rtx d
= XEXP (e
, 0);
1860 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), copy_to_reg (d
), XEXP (e
, 1));
1861 RTVEC_ELT (vec
, i
) = e
;
1864 return gen_rtx_PARALLEL (GET_MODE (src
), vec
);
1867 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1868 where SRC is non-consecutive registers represented by a PARALLEL.
1869 SSIZE represents the total size of block ORIG_DST, or -1 if not
1873 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1876 int start
, finish
, i
;
1877 enum machine_mode m
= GET_MODE (orig_dst
);
1879 gcc_assert (GET_CODE (src
) == PARALLEL
);
1881 if (!SCALAR_INT_MODE_P (m
)
1882 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
1884 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_dst
));
1885 if (imode
== BLKmode
)
1886 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
);
1888 dst
= gen_reg_rtx (imode
);
1889 emit_group_store (dst
, src
, type
, ssize
);
1890 if (imode
!= BLKmode
)
1891 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
1892 emit_move_insn (orig_dst
, dst
);
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (src
, 0, 0), 0))
1902 finish
= XVECLEN (src
, 0);
1904 tmps
= XALLOCAVEC (rtx
, finish
);
1906 /* Copy the (probable) hard regs into pseudos. */
1907 for (i
= start
; i
< finish
; i
++)
1909 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1910 if (!REG_P (reg
) || REGNO (reg
) < FIRST_PSEUDO_REGISTER
)
1912 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1913 emit_move_insn (tmps
[i
], reg
);
1919 /* If we won't be storing directly into memory, protect the real destination
1920 from strange tricks we might play. */
1922 if (GET_CODE (dst
) == PARALLEL
)
1926 /* We can get a PARALLEL dst if there is a conditional expression in
1927 a return statement. In that case, the dst and src are the same,
1928 so no action is necessary. */
1929 if (rtx_equal_p (dst
, src
))
1932 /* It is unclear if we can ever reach here, but we may as well handle
1933 it. Allocate a temporary, and split this into a store/load to/from
1936 temp
= assign_stack_temp (GET_MODE (dst
), ssize
);
1937 emit_group_store (temp
, src
, type
, ssize
);
1938 emit_group_load (dst
, temp
, type
, ssize
);
1941 else if (!MEM_P (dst
) && GET_CODE (dst
) != CONCAT
)
1943 enum machine_mode outer
= GET_MODE (dst
);
1944 enum machine_mode inner
;
1945 HOST_WIDE_INT bytepos
;
1949 if (!REG_P (dst
) || REGNO (dst
) < FIRST_PSEUDO_REGISTER
)
1950 dst
= gen_reg_rtx (outer
);
1952 /* Make life a bit easier for combine. */
1953 /* If the first element of the vector is the low part
1954 of the destination mode, use a paradoxical subreg to
1955 initialize the destination. */
1958 inner
= GET_MODE (tmps
[start
]);
1959 bytepos
= subreg_lowpart_offset (inner
, outer
);
1960 if (INTVAL (XEXP (XVECEXP (src
, 0, start
), 1)) == bytepos
)
1962 temp
= simplify_gen_subreg (outer
, tmps
[start
],
1966 emit_move_insn (dst
, temp
);
1973 /* If the first element wasn't the low part, try the last. */
1975 && start
< finish
- 1)
1977 inner
= GET_MODE (tmps
[finish
- 1]);
1978 bytepos
= subreg_lowpart_offset (inner
, outer
);
1979 if (INTVAL (XEXP (XVECEXP (src
, 0, finish
- 1), 1)) == bytepos
)
1981 temp
= simplify_gen_subreg (outer
, tmps
[finish
- 1],
1985 emit_move_insn (dst
, temp
);
1992 /* Otherwise, simply initialize the result to zero. */
1994 emit_move_insn (dst
, CONST0_RTX (outer
));
  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              enum machine_mode dest_mode = GET_MODE (dest);
              enum machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }
          bytelen = adj_bytelen;
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode copy_mode;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (tgtblk))
    {
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
        copy_mode = mem_mode;
    }

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1, false,
                                          NULL_RTX, copy_mode, copy_mode));
    }

  return tgtblk;
}
/* Copy BLKmode value SRC into a register of mode MODE.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (enum machine_mode mode, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  enum machine_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                           * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
         on a word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        {
          /* Generate an appropriate register.  */
          dst_word = gen_reg_rtx (word_mode);
          dst_words[xbitpos / BITS_PER_WORD] = dst_word;

          /* Clear the destination before we move anything into it.  */
          emit_move_insn (dst_word, CONST0_RTX (word_mode));
        }

      /* We need a new source operand each time bitpos is on a word
         boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
         xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
                       0, 0, word_mode,
                       extract_bit_field (src_word, bitsize,
                                          bitpos % BITS_PER_WORD, 1, false,
                                          NULL_RTX, word_mode, word_mode));
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
         entire structure.  */
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        /* Have we found a large enough mode?  */
        if (GET_MODE_SIZE (mode) >= bytes)
          break;

      /* A suitable mode should have been found.  */
      gcc_assert (mode != VOIDmode);
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

#ifdef HAVE_conditional_move
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
#endif
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (! (memsetp
         ? SET_BY_PIECES_P (len, align)
         : STORE_BY_PIECES_P (len, align)))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          mode = widest_int_mode_for_size (max_size);

          if (mode == VOIDmode)
            break;

          icode = optab_handler (mov_optab, mode);
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!targetm.legitimate_constant_p (mode, cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
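
/* Illustrative sketch (compiled out, not part of the original code): the
   usual check-then-commit pairing with store_by_pieces below, as done when
   expanding a copy from a constant string.  CONSTFUN/DATA follow the
   callback contract described above; the names are hypothetical.  */
#if 0
static rtx
example_store_constant_block (rtx dest_mem, unsigned HOST_WIDE_INT len,
                              rtx (*constfun) (void *, HOST_WIDE_INT,
                                               enum machine_mode),
                              void *data)
{
  unsigned int align = MEM_ALIGN (dest_mem);

  if (!can_store_by_pieces (len, constfun, data, align, false))
    return NULL_RTX;

  /* ENDP == 0: return DEST_MEM itself rather than a pointer past the end.  */
  return store_by_pieces (dest_mem, len, constfun, data, align, false, 0);
}
#endif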
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  enum machine_mode to_addr_mode = get_address_mode (to);
  struct store_by_pieces_d data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (memsetp
              ? SET_BY_PIECES_P (len, align)
              : STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  enum machine_mode to_addr_mode = get_address_mode (data->to);
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_to_mode_reg (to_addr_mode,
                                            plus_constant (to_addr_mode,
                                                           to_addr,
                                                           data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  return clear_storage_hints (object, size, method, 0, -1);
}
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those new pseudos into a VAR_DECL and use them later.  */

  object = copy_addr_to_reg (XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
                        unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
        {
          struct expand_operand ops[6];
          unsigned int nops;

          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6);

          create_fixed_operand (&ops[0], object);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[1], size, mode, true);
          create_convert_operand_from (&ops[2], val, byte_mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops == 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (maybe_expand_insn (code, nops, ops))
            return true;
        }
    }

  return false;
}
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, false, NULL_RTX, imode, imode);
}
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
                       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              GEN_INT (adjust), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}

/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  return emit_move_complex_parts (x, y);
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }
  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;
  return false;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
         from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
        continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, false);
      if (ret)
        return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                         MEM_ADDR_SPACE (x))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
                                        MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (!insn_operand_matches (ic, 1, trunc_y))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  speed);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  speed);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      /* For CSE's benefit, force the compressed constant pool entry
         into a new pseudo.  This constant may be used in different modes,
         and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
        temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (Pmode, size,
                                                               extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}
/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is INT_MIN.  */

HOST_WIDE_INT
find_args_size_adjust (rtx insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
         of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
        return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
         The call itself is always first, and the stack adjust is
         usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;
        }
      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
        return 0;
      /* Fall through to process the extracted SET and DEST
         as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
         and a store for a PUSH_ROUNDING pattern, rather than a
         PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;

          /* We do not expect an auto-inc of the sp in the parallel.  */
          gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      if (i < 0)
        return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
         the stack pointer in V4SImode.  Consider that non-trivial.  */
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
          && GET_CODE (SET_SRC (set)) == PLUS
          && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
          && CONST_INT_P (XEXP (SET_SRC (set), 1)))
        return INTVAL (XEXP (SET_SRC (set), 1));
      /* ??? Reload can generate no-op moves, which will be cleaned
         up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
        return 0;
      else
        return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
        {
          mem = dest;
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
        mem = SET_SRC (set);
      else
        return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
        {
        case PRE_INC:
        case POST_INC:
          return GET_MODE_SIZE (GET_MODE (mem));
        case PRE_DEC:
        case POST_DEC:
          return -GET_MODE_SIZE (GET_MODE (mem));
        case PRE_MODIFY:
        case POST_MODIFY:
          addr = XEXP (addr, 1);
          gcc_assert (GET_CODE (addr) == PLUS);
          gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
          gcc_assert (CONST_INT_P (XEXP (addr, 1)));
          return INTVAL (XEXP (addr, 1));
        default:
          gcc_unreachable ();
        }
    }
}
int
fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
        {
          if (!CALL_P (insn)
              || ACCUMULATE_OUTGOING_ARGS
              || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
            continue;
        }

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
        saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
#ifdef STACK_GROWS_DOWNWARD
      this_delta = -this_delta;
#endif
      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
        return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    GEN_INT (rounded_size),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  int delta, old_delta = stack_pointer_delta;
  rtx prev = get_last_insn ();
  rtx last;

  emit_single_push_insn_1 (mode, x, type);

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (delta == INT_MIN || delta == old_delta);
}
#endif
3952 /* Generate code to push X onto the stack, assuming it has mode MODE and
3954 MODE is redundant except when X is a CONST_INT (since they don't
3956 SIZE is an rtx for the size of data to be copied (in bytes),
3957 needed only if X is BLKmode.
3959 ALIGN (in bits) is maximum alignment we can assume.
3961 If PARTIAL and REG are both nonzero, then copy that many of the first
3962 bytes of X into registers starting with REG, and push the rest of X.
3963 The amount of space pushed is decreased by PARTIAL bytes.
3964 REG must be a hard register in this case.
3965 If REG is zero but PARTIAL is not, take any all others actions for an
3966 argument partially in registers, but do not actually load any
3969 EXTRA is the amount in bytes of extra space to leave next to this arg.
3970 This is ignored if an argument block has already been allocated.
3972 On a machine that lacks real push insns, ARGS_ADDR is the address of
3973 the bottom of the argument block for this call. We use indexing off there
3974 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3975 argument block has not been preallocated.
3977 ARGS_SO_FAR is the size of args previously pushed for this call.
3979 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3980 for arguments passed in registers. If nonzero, it will be the number
3981 of bytes required. */
3984 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3985 unsigned int align
, int partial
, rtx reg
, int extra
,
3986 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3990 enum direction stack_direction
3991 #ifdef STACK_GROWS_DOWNWARD
3997 /* Decide where to pad the argument: `downward' for below,
3998 `upward' for above, or `none' for don't pad it.
3999 Default is below for small data on big-endian machines; else above. */
4000 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
4002 /* Invert direction if stack is post-decrement.
4004 if (STACK_PUSH_CODE
== POST_DEC
)
4005 if (where_pad
!= none
)
4006 where_pad
= (where_pad
== downward
? upward
: downward
);
4011 || (STRICT_ALIGNMENT
&& align
< GET_MODE_ALIGNMENT (mode
)))
4013 /* Copy a block into the stack, entirely or partially. */
4020 offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
4021 used
= partial
- offset
;
4023 if (mode
!= BLKmode
)
4025 /* A value is to be stored in an insufficiently aligned
4026 stack slot; copy via a suitably aligned slot if
4028 size
= GEN_INT (GET_MODE_SIZE (mode
));
4029 if (!MEM_P (xinner
))
4031 temp
= assign_temp (type
, 1, 1);
4032 emit_move_insn (temp
, xinner
);
4039 /* USED is now the # of bytes we need not copy to the stack
4040 because registers will take care of them. */
4043 xinner
= adjust_address (xinner
, BLKmode
, used
);
4045 /* If the partial register-part of the arg counts in its stack size,
4046 skip the part of stack space corresponding to the registers.
4047 Otherwise, start copying to the beginning of the stack space,
4048 by setting SKIP to 0. */
4049 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
4051 #ifdef PUSH_ROUNDING
4052 /* Do it with several push insns if that doesn't take lots of insns
4053 and if there is no difficulty with push insns that skip bytes
4054 on the stack for alignment purposes. */
4057 && CONST_INT_P (size
)
4059 && MEM_ALIGN (xinner
) >= align
4060 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
4061 /* Here we avoid the case of a structure whose weak alignment
4062 forces many pushes of a small amount of data,
4063 and such small pushes do rounding that causes trouble. */
4064 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
4065 || align
>= BIGGEST_ALIGNMENT
4066 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
4067 == (align
/ BITS_PER_UNIT
)))
4068 && (HOST_WIDE_INT
) PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
4070 /* Push padding now if padding above and stack grows down,
4071 or if padding below and stack grows up.
4072 But if space already allocated, this has already been done. */
4073 if (extra
&& args_addr
== 0
4074 && where_pad
!= none
&& where_pad
!= stack_direction
)
4075 anti_adjust_stack (GEN_INT (extra
));
4077 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
4080 #endif /* PUSH_ROUNDING */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (CONST_INT_P (size))
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (CONST_INT_P (args_so_far))
            temp = memory_address (BLKmode,
                                   plus_constant (Pmode, args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (Pmode,
                                                  gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));
          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);
      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;
      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (CONST_INT_P (args_so_far))
            addr
              = memory_address (mode,
                                plus_constant (Pmode, args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (dest, align);

          emit_move_insn (dest, x);
        }
    }
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        {
          gcc_assert (partial % UNITS_PER_WORD == 0);
          move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
        }
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
          || x == 0
          /* Only registers can be subtargets.  */
          || !REG_P (x)
          /* Don't use hard regs to avoid extending their life.  */
          || REGNO (x) < FIRST_PSEUDO_REGISTER
          ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */
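/* For example, given source along the lines of

       struct S { unsigned a : 12; unsigned b : 20; } s;
       ...
       s.b |= 0x10;

   the read-modify-write of the word containing S.B can often be emitted
   as a single IOR of a shifted constant, instead of extracting the field,
   modifying it and inserting it back.  The exact cases handled are spelled
   out in the body below.  */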
static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
                                 unsigned HOST_WIDE_INT bitpos,
                                 unsigned HOST_WIDE_INT bitregion_start,
                                 unsigned HOST_WIDE_INT bitregion_end,
                                 enum machine_mode mode1, rtx str_rtx,
                                 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple srcstmt;
  enum tree_code code;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);
  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
         should be a bitfield.  */
      if (!op0stmt
          || !is_gimple_assign (op0stmt)
          || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
        return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;
  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
        str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
                                bitregion_start, bitregion_end,
                                MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
        return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;
  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
         where we don't need to do any masking and also
         1 bit bitfields where xor can be used.
         We might win by one instruction for the other bitfields
         too if insv/extv instructions aren't used, so that
         can be added later.  */
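      /* Illustration: with "struct S { unsigned a : 9; unsigned b : 23; } s"
         on a 32-bit word target, "s.b += n" touches only the topmost bits of
         the word, so any carry falls off the top and no mask is needed to
         protect s.a; and for a 1-bit field, adding or subtracting an integer
         constant either flips the bit (odd constant) or leaves it alone
         (even constant), which a masked xor captures.  */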
      if (bitpos + bitsize != str_bitsize
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
        break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = code == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
        {
          value = expand_and (str_mode, value, const1_rtx, NULL);
          binop = xor_optab;
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
        {
          rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
          value = expand_and (str_mode, value, mask, NULL_RTX);
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */
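/* For instance, in

       struct S { int i : 7; int j : 9; char c; int k : 5; };

   the fields I and J form one memory location (one bit region), while K,
   being separated from them by the non-bitfield member C, starts a region
   of its own.  A store to J must therefore not touch bytes belonging to C
   or K, roughly speaking; the representative field supplies the bounds
   computed below.  */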
4471 get_bit_range (unsigned HOST_WIDE_INT
*bitstart
,
4472 unsigned HOST_WIDE_INT
*bitend
,
4474 HOST_WIDE_INT
*bitpos
,
4477 HOST_WIDE_INT bitoffset
;
4480 gcc_assert (TREE_CODE (exp
) == COMPONENT_REF
);
4482 field
= TREE_OPERAND (exp
, 1);
4483 repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
4484 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4485 need to limit the range we can access. */
4488 *bitstart
= *bitend
= 0;
4492 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4493 part of a larger bit field, then the representative does not serve any
4494 useful purpose. This can occur in Ada. */
4495 if (handled_component_p (TREE_OPERAND (exp
, 0)))
4497 enum machine_mode rmode
;
4498 HOST_WIDE_INT rbitsize
, rbitpos
;
4502 get_inner_reference (TREE_OPERAND (exp
, 0), &rbitsize
, &rbitpos
,
4503 &roffset
, &rmode
, &unsignedp
, &volatilep
, false);
4504 if ((rbitpos
% BITS_PER_UNIT
) != 0)
4506 *bitstart
= *bitend
= 0;
4511 /* Compute the adjustment to bitpos from the offset of the field
4512 relative to the representative. DECL_FIELD_OFFSET of field and
4513 repr are the same by construction if they are not constants,
4514 see finish_bitfield_layout. */
4515 if (host_integerp (DECL_FIELD_OFFSET (field
), 1)
4516 && host_integerp (DECL_FIELD_OFFSET (repr
), 1))
4517 bitoffset
= (tree_low_cst (DECL_FIELD_OFFSET (field
), 1)
4518 - tree_low_cst (DECL_FIELD_OFFSET (repr
), 1)) * BITS_PER_UNIT
;
4521 bitoffset
+= (tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
4522 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr
), 1));
4524 /* If the adjustment is larger than bitpos, we would have a negative bit
4525 position for the lower bound and this may wreak havoc later. This can
4526 occur only if we have a non-null offset, so adjust offset and bitpos
4527 to make the lower bound non-negative. */
4528 if (bitoffset
> *bitpos
)
4530 HOST_WIDE_INT adjust
= bitoffset
- *bitpos
;
4532 gcc_assert ((adjust
% BITS_PER_UNIT
) == 0);
4533 gcc_assert (*offset
!= NULL_TREE
);
4537 = size_binop (MINUS_EXPR
, *offset
, size_int (adjust
/ BITS_PER_UNIT
));
4541 *bitstart
= *bitpos
- bitoffset
;
4543 *bitend
= *bitstart
+ tree_low_cst (DECL_SIZE (repr
), 1) - 1;
/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */
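/* For example (illustrative only): after folding, an access like

       struct P { int x; int y; } p;
       ((struct P *) &p)->y = 1;

   can survive as a MEM_REF based on &p even when p is no longer considered
   addressable and is given a register rather than a stack slot; the store
   must then be expanded as a register operation rather than a memory one.  */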
static bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  if (TREE_CODE (base) != ADDR_EXPR)
    return false;
  base = TREE_OPERAND (base, 0);
  return (DECL_P (base)
          && !TREE_ADDRESSABLE (base)
          && DECL_MODE (base) != BLKmode
          && DECL_RTL_SET_P (base)
          && !MEM_P (DECL_RTL (base)));
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */
void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  enum machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;
4586 /* Handle misaligned stores. */
4587 mode
= TYPE_MODE (TREE_TYPE (to
));
4588 if ((TREE_CODE (to
) == MEM_REF
4589 || TREE_CODE (to
) == TARGET_MEM_REF
)
4591 && !mem_ref_refers_to_non_mem_p (to
)
4592 && ((align
= get_object_or_type_alignment (to
))
4593 < GET_MODE_ALIGNMENT (mode
))
4594 && (((icode
= optab_handler (movmisalign_optab
, mode
))
4595 != CODE_FOR_nothing
)
4596 || SLOW_UNALIGNED_ACCESS (mode
, align
)))
4600 reg
= expand_expr (from
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4601 reg
= force_not_mem (reg
);
4602 mem
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4604 if (icode
!= CODE_FOR_nothing
)
4606 struct expand_operand ops
[2];
4608 create_fixed_operand (&ops
[0], mem
);
4609 create_input_operand (&ops
[1], reg
, mode
);
4610 /* The movmisalign<mode> pattern cannot fail, else the assignment
4611 would silently be omitted. */
4612 expand_insn (icode
, 2, ops
);
4615 store_bit_field (mem
, GET_MODE_BITSIZE (mode
),
4616 0, 0, 0, mode
, reg
);
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, have the same
     problem.  Same for (partially) storing into a non-memory object.  */
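  /* Typical sources of such assignments (illustrative only):

         s.f.b = x;       // nested component; S may live in a register
         a[3] = y;        // array element at a constant index
         ps->arr[i] = z;  // element of a packed, possibly unaligned field

     These go through get_inner_reference and store_field below rather
     than a plain move.  */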
4625 if (handled_component_p (to
)
4626 || (TREE_CODE (to
) == MEM_REF
4627 && mem_ref_refers_to_non_mem_p (to
))
4628 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
4630 enum machine_mode mode1
;
4631 HOST_WIDE_INT bitsize
, bitpos
;
4632 unsigned HOST_WIDE_INT bitregion_start
= 0;
4633 unsigned HOST_WIDE_INT bitregion_end
= 0;
4642 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
4643 &unsignedp
, &volatilep
, true);
4645 if (TREE_CODE (to
) == COMPONENT_REF
4646 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to
, 1)))
4647 get_bit_range (&bitregion_start
, &bitregion_end
, to
, &bitpos
, &offset
);
4649 /* If we are going to use store_bit_field and extract_bit_field,
4650 make sure to_rtx will be safe for multiple use. */
4651 mode
= TYPE_MODE (TREE_TYPE (tem
));
4652 if (TREE_CODE (tem
) == MEM_REF
4654 && ((align
= get_object_or_type_alignment (tem
))
4655 < GET_MODE_ALIGNMENT (mode
))
4656 && ((icode
= optab_handler (movmisalign_optab
, mode
))
4657 != CODE_FOR_nothing
))
4659 struct expand_operand ops
[2];
4662 to_rtx
= gen_reg_rtx (mode
);
4663 mem
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4665 /* If the misaligned store doesn't overwrite all bits, perform
4666 rmw cycle on MEM. */
4667 if (bitsize
!= GET_MODE_BITSIZE (mode
))
4669 create_input_operand (&ops
[0], to_rtx
, mode
);
4670 create_fixed_operand (&ops
[1], mem
);
4671 /* The movmisalign<mode> pattern cannot fail, else the assignment
4672 would silently be omitted. */
4673 expand_insn (icode
, 2, ops
);
4675 mem
= copy_rtx (mem
);
4681 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4684 /* If the bitfield is volatile, we want to access it in the
4685 field's mode, not the computed mode.
4686 If a MEM has VOIDmode (external with incomplete type),
4687 use BLKmode for it instead. */
4690 if (volatilep
&& flag_strict_volatile_bitfields
> 0)
4691 to_rtx
= adjust_address (to_rtx
, mode1
, 0);
4692 else if (GET_MODE (to_rtx
) == VOIDmode
)
4693 to_rtx
= adjust_address (to_rtx
, BLKmode
, 0);
4698 enum machine_mode address_mode
;
4701 if (!MEM_P (to_rtx
))
4703 /* We can get constant negative offsets into arrays with broken
4704 user code. Translate this to a trap instead of ICEing. */
4705 gcc_assert (TREE_CODE (offset
) == INTEGER_CST
);
4706 expand_builtin_trap ();
4707 to_rtx
= gen_rtx_MEM (BLKmode
, const0_rtx
);
4710 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4711 address_mode
= get_address_mode (to_rtx
);
4712 if (GET_MODE (offset_rtx
) != address_mode
)
4713 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
4715 /* A constant address in TO_RTX can have VOIDmode, we must not try
4716 to call force_reg for that case. Avoid that case. */
4718 && GET_MODE (to_rtx
) == BLKmode
4719 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
4721 && (bitpos
% bitsize
) == 0
4722 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4723 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
4725 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4729 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4730 highest_pow2_factor_for_target (to
,
4734 /* No action is needed if the target is not a memory and the field
4735 lies completely outside that target. This can occur if the source
4736 code contains an out-of-bounds access to a small array. */
4738 && GET_MODE (to_rtx
) != BLKmode
4739 && (unsigned HOST_WIDE_INT
) bitpos
4740 >= GET_MODE_PRECISION (GET_MODE (to_rtx
)))
4742 expand_normal (from
);
4745 /* Handle expand_expr of a complex value returning a CONCAT. */
4746 else if (GET_CODE (to_rtx
) == CONCAT
)
4748 unsigned short mode_bitsize
= GET_MODE_BITSIZE (GET_MODE (to_rtx
));
4749 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from
)))
4751 && bitsize
== mode_bitsize
)
4752 result
= store_expr (from
, to_rtx
, false, nontemporal
);
4753 else if (bitsize
== mode_bitsize
/ 2
4754 && (bitpos
== 0 || bitpos
== mode_bitsize
/ 2))
4755 result
= store_expr (from
, XEXP (to_rtx
, bitpos
!= 0), false,
4757 else if (bitpos
+ bitsize
<= mode_bitsize
/ 2)
4758 result
= store_field (XEXP (to_rtx
, 0), bitsize
, bitpos
,
4759 bitregion_start
, bitregion_end
,
4760 mode1
, from
, TREE_TYPE (tem
),
4761 get_alias_set (to
), nontemporal
);
4762 else if (bitpos
>= mode_bitsize
/ 2)
4763 result
= store_field (XEXP (to_rtx
, 1), bitsize
,
4764 bitpos
- mode_bitsize
/ 2,
4765 bitregion_start
, bitregion_end
,
4767 TREE_TYPE (tem
), get_alias_set (to
),
4769 else if (bitpos
== 0 && bitsize
== mode_bitsize
)
4772 result
= expand_normal (from
);
4773 from_rtx
= simplify_gen_subreg (GET_MODE (to_rtx
), result
,
4774 TYPE_MODE (TREE_TYPE (from
)), 0);
4775 emit_move_insn (XEXP (to_rtx
, 0),
4776 read_complex_part (from_rtx
, false));
4777 emit_move_insn (XEXP (to_rtx
, 1),
4778 read_complex_part (from_rtx
, true));
4782 rtx temp
= assign_stack_temp (GET_MODE (to_rtx
),
4783 GET_MODE_SIZE (GET_MODE (to_rtx
)));
4784 write_complex_part (temp
, XEXP (to_rtx
, 0), false);
4785 write_complex_part (temp
, XEXP (to_rtx
, 1), true);
4786 result
= store_field (temp
, bitsize
, bitpos
,
4787 bitregion_start
, bitregion_end
,
4789 TREE_TYPE (tem
), get_alias_set (to
),
4791 emit_move_insn (XEXP (to_rtx
, 0), read_complex_part (temp
, false));
4792 emit_move_insn (XEXP (to_rtx
, 1), read_complex_part (temp
, true));
4799 /* If the field is at offset zero, we could have been given the
4800 DECL_RTX of the parent struct. Don't munge it. */
4801 to_rtx
= shallow_copy_rtx (to_rtx
);
4803 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
4805 /* Deal with volatile and readonly fields. The former is only
4806 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4808 MEM_VOLATILE_P (to_rtx
) = 1;
4809 if (component_uses_parent_alias_set (to
))
4810 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4813 if (optimize_bitfield_assignment_op (bitsize
, bitpos
,
4814 bitregion_start
, bitregion_end
,
4819 result
= store_field (to_rtx
, bitsize
, bitpos
,
4820 bitregion_start
, bitregion_end
,
4822 TREE_TYPE (tem
), get_alias_set (to
),
4828 struct expand_operand ops
[2];
4830 create_fixed_operand (&ops
[0], mem
);
4831 create_input_operand (&ops
[1], to_rtx
, mode
);
4832 /* The movmisalign<mode> pattern cannot fail, else the assignment
4833 would silently be omitted. */
4834 expand_insn (icode
, 2, ops
);
4838 preserve_temp_slots (result
);
4843 /* If the rhs is a function call and its value is not an aggregate,
4844 call the function before we start to compute the lhs.
4845 This is needed for correct code for cases such as
4846 val = setjmp (buf) on machines where reference to val
4847 requires loading up part of an address in a separate insn.
4849 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4850 since it might be a promoted variable where the zero- or sign- extension
4851 needs to be done. Handling this in the normal way is safe because no
4852 computation is done before the call. The same is true for SSA names. */
4853 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
4854 && COMPLETE_TYPE_P (TREE_TYPE (from
))
4855 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
4856 && ! (((TREE_CODE (to
) == VAR_DECL
4857 || TREE_CODE (to
) == PARM_DECL
4858 || TREE_CODE (to
) == RESULT_DECL
)
4859 && REG_P (DECL_RTL (to
)))
4860 || TREE_CODE (to
) == SSA_NAME
))
4865 value
= expand_normal (from
);
4867 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4869 /* Handle calls that return values in multiple non-contiguous locations.
4870 The Irix 6 ABI has examples of this. */
4871 if (GET_CODE (to_rtx
) == PARALLEL
)
4872 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
4873 int_size_in_bytes (TREE_TYPE (from
)));
4874 else if (GET_MODE (to_rtx
) == BLKmode
)
4875 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
4878 if (POINTER_TYPE_P (TREE_TYPE (to
)))
4879 value
= convert_memory_address_addr_space
4880 (GET_MODE (to_rtx
), value
,
4881 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to
))));
4883 emit_move_insn (to_rtx
, value
);
4885 preserve_temp_slots (to_rtx
);
4890 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4891 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4893 /* Don't move directly into a return register. */
4894 if (TREE_CODE (to
) == RESULT_DECL
4895 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
4900 if (REG_P (to_rtx
) && TYPE_MODE (TREE_TYPE (from
)) == BLKmode
)
4901 temp
= copy_blkmode_to_reg (GET_MODE (to_rtx
), from
);
4903 temp
= expand_expr (from
, NULL_RTX
, GET_MODE (to_rtx
), EXPAND_NORMAL
);
4905 if (GET_CODE (to_rtx
) == PARALLEL
)
4906 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
4907 int_size_in_bytes (TREE_TYPE (from
)));
4909 emit_move_insn (to_rtx
, temp
);
4911 preserve_temp_slots (to_rtx
);
4916 /* In case we are returning the contents of an object which overlaps
4917 the place the value is being stored, use a safe function when copying
4918 a value through a pointer into a structure value return block. */
4919 if (TREE_CODE (to
) == RESULT_DECL
4920 && TREE_CODE (from
) == INDIRECT_REF
4921 && ADDR_SPACE_GENERIC_P
4922 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from
, 0)))))
4923 && refs_may_alias_p (to
, from
)
4924 && cfun
->returns_struct
4925 && !cfun
->returns_pcc_struct
)
4930 size
= expr_size (from
);
4931 from_rtx
= expand_normal (from
);
4933 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
4934 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
4935 XEXP (from_rtx
, 0), Pmode
,
4936 convert_to_mode (TYPE_MODE (sizetype
),
4937 size
, TYPE_UNSIGNED (sizetype
)),
4938 TYPE_MODE (sizetype
));
4940 preserve_temp_slots (to_rtx
);
4945 /* Compute FROM and store the value in the rtx we got. */
4948 result
= store_expr (from
, to_rtx
, 0, nontemporal
);
4949 preserve_temp_slots (result
);
/* Emit a nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */
static bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  enum machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.  */
4988 store_expr (tree exp
, rtx target
, int call_param_p
, bool nontemporal
)
4991 rtx alt_rtl
= NULL_RTX
;
4992 location_t loc
= EXPR_LOCATION (exp
);
4994 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4996 /* C++ can generate ?: expressions with a throw expression in one
4997 branch and an rvalue in the other. Here, we resolve attempts to
4998 store the throw expression's nonexistent result. */
4999 gcc_assert (!call_param_p
);
5000 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5003 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
5005 /* Perform first part of compound expression, then assign from second
5007 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
5008 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5009 return store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
,
5012 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
5014 /* For conditional expression, get safe form of the target. Then
5015 test the condition, doing the appropriate assignment on either
5016 side. This avoids the creation of unnecessary temporaries.
5017 For non-BLKmode, it is more efficient not to do this. */
5019 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
5021 do_pending_stack_adjust ();
5023 jumpifnot (TREE_OPERAND (exp
, 0), lab1
, -1);
5024 store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
,
5026 emit_jump_insn (gen_jump (lab2
));
5029 store_expr (TREE_OPERAND (exp
, 2), target
, call_param_p
,
5036 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
5037 /* If this is a scalar in a register that is stored in a wider mode
5038 than the declared mode, compute the result into its declared mode
5039 and then convert to the wider mode. Our value is the computed
5042 rtx inner_target
= 0;
5044 /* We can do the conversion inside EXP, which will often result
5045 in some optimizations. Do the conversion in two steps: first
5046 change the signedness, if needed, then the extend. But don't
5047 do this if the type of EXP is a subtype of something else
5048 since then the conversion might involve more than just
5049 converting modes. */
5050 if (INTEGRAL_TYPE_P (TREE_TYPE (exp
))
5051 && TREE_TYPE (TREE_TYPE (exp
)) == 0
5052 && GET_MODE_PRECISION (GET_MODE (target
))
5053 == TYPE_PRECISION (TREE_TYPE (exp
)))
5055 if (TYPE_UNSIGNED (TREE_TYPE (exp
))
5056 != SUBREG_PROMOTED_UNSIGNED_P (target
))
5058 /* Some types, e.g. Fortran's logical*4, won't have a signed
5059 version, so use the mode instead. */
5061 = (signed_or_unsigned_type_for
5062 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)));
5064 ntype
= lang_hooks
.types
.type_for_mode
5065 (TYPE_MODE (TREE_TYPE (exp
)),
5066 SUBREG_PROMOTED_UNSIGNED_P (target
));
5068 exp
= fold_convert_loc (loc
, ntype
, exp
);
5071 exp
= fold_convert_loc (loc
, lang_hooks
.types
.type_for_mode
5072 (GET_MODE (SUBREG_REG (target
)),
5073 SUBREG_PROMOTED_UNSIGNED_P (target
)),
5076 inner_target
= SUBREG_REG (target
);
5079 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
5080 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5082 /* If TEMP is a VOIDmode constant, use convert_modes to make
5083 sure that we properly convert it. */
5084 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
5086 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5087 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
5088 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
5089 GET_MODE (target
), temp
,
5090 SUBREG_PROMOTED_UNSIGNED_P (target
));
5093 convert_move (SUBREG_REG (target
), temp
,
5094 SUBREG_PROMOTED_UNSIGNED_P (target
));
5098 else if ((TREE_CODE (exp
) == STRING_CST
5099 || (TREE_CODE (exp
) == MEM_REF
5100 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5101 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5103 && integer_zerop (TREE_OPERAND (exp
, 1))))
5104 && !nontemporal
&& !call_param_p
5107 /* Optimize initialization of an array with a STRING_CST. */
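      /* For example, for

             char buf[100] = "abc";

         the first few bytes are stored with store_by_pieces and the rest of
         BUF is cleared, rather than copying or calling library routines for
         the whole array.  (Illustrative; the exact cut-off depends on
         STORE_MAX_PIECES and can_store_by_pieces.)  */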
5108 HOST_WIDE_INT exp_len
, str_copy_len
;
5110 tree str
= TREE_CODE (exp
) == STRING_CST
5111 ? exp
: TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5113 exp_len
= int_expr_size (exp
);
5117 if (TREE_STRING_LENGTH (str
) <= 0)
5120 str_copy_len
= strlen (TREE_STRING_POINTER (str
));
5121 if (str_copy_len
< TREE_STRING_LENGTH (str
) - 1)
5124 str_copy_len
= TREE_STRING_LENGTH (str
);
5125 if ((STORE_MAX_PIECES
& (STORE_MAX_PIECES
- 1)) == 0
5126 && TREE_STRING_POINTER (str
)[TREE_STRING_LENGTH (str
) - 1] == '\0')
5128 str_copy_len
+= STORE_MAX_PIECES
- 1;
5129 str_copy_len
&= ~(STORE_MAX_PIECES
- 1);
5131 str_copy_len
= MIN (str_copy_len
, exp_len
);
5132 if (!can_store_by_pieces (str_copy_len
, builtin_strncpy_read_str
,
5133 CONST_CAST (char *, TREE_STRING_POINTER (str
)),
5134 MEM_ALIGN (target
), false))
5139 dest_mem
= store_by_pieces (dest_mem
,
5140 str_copy_len
, builtin_strncpy_read_str
,
5142 TREE_STRING_POINTER (str
)),
5143 MEM_ALIGN (target
), false,
5144 exp_len
> str_copy_len
? 1 : 0);
5145 if (exp_len
> str_copy_len
)
5146 clear_storage (adjust_address (dest_mem
, BLKmode
, 0),
5147 GEN_INT (exp_len
- str_copy_len
),
5156 /* If we want to use a nontemporal store, force the value to
5158 tmp_target
= nontemporal
? NULL_RTX
: target
;
5159 temp
= expand_expr_real (exp
, tmp_target
, GET_MODE (target
),
5161 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
5165 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5166 the same as that of TARGET, adjust the constant. This is needed, for
5167 example, in case it is a CONST_DOUBLE and we want only a word-sized
5169 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
5170 && TREE_CODE (exp
) != ERROR_MARK
5171 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
5172 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5173 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5175 /* If value was not generated in the target, store it there.
5176 Convert the value to TARGET's type first if necessary and emit the
5177 pending incrementations that have been queued when expanding EXP.
5178 Note that we cannot emit the whole queue blindly because this will
5179 effectively disable the POST_INC optimization later.
5181 If TEMP and TARGET compare equal according to rtx_equal_p, but
5182 one or both of them are volatile memory refs, we have to distinguish
5184 - expand_expr has used TARGET. In this case, we must not generate
5185 another copy. This can be detected by TARGET being equal according
5187 - expand_expr has not used TARGET - that means that the source just
5188 happens to have the same RTX form. Since temp will have been created
5189 by expand_expr, it will compare unequal according to == .
5190 We must generate a copy in this case, to reach the correct number
5191 of volatile memory references. */
5193 if ((! rtx_equal_p (temp
, target
)
5194 || (temp
!= target
&& (side_effects_p (temp
)
5195 || side_effects_p (target
))))
5196 && TREE_CODE (exp
) != ERROR_MARK
5197 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5198 but TARGET is not valid memory reference, TEMP will differ
5199 from TARGET although it is really the same location. */
5201 && rtx_equal_p (alt_rtl
, target
)
5202 && !side_effects_p (alt_rtl
)
5203 && !side_effects_p (target
))
5204 /* If there's nothing to copy, don't bother. Don't call
5205 expr_size unless necessary, because some front-ends (C++)
5206 expr_size-hook must not be given objects that are not
5207 supposed to be bit-copied or bit-initialized. */
5208 && expr_size (exp
) != const0_rtx
)
5210 if (GET_MODE (temp
) != GET_MODE (target
)
5211 && GET_MODE (temp
) != VOIDmode
)
5213 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
5214 if (GET_MODE (target
) == BLKmode
5215 && GET_MODE (temp
) == BLKmode
)
5216 emit_block_move (target
, temp
, expr_size (exp
),
5218 ? BLOCK_OP_CALL_PARM
5219 : BLOCK_OP_NORMAL
));
5220 else if (GET_MODE (target
) == BLKmode
)
5221 store_bit_field (target
, INTVAL (expr_size (exp
)) * BITS_PER_UNIT
,
5222 0, 0, 0, GET_MODE (temp
), temp
);
5224 convert_move (target
, temp
, unsignedp
);
5227 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
5229 /* Handle copying a string constant into an array. The string
5230 constant may be shorter than the array. So copy just the string's
5231 actual length, and clear the rest. First get the size of the data
5232 type of the string, which is actually the size of the target. */
5233 rtx size
= expr_size (exp
);
5235 if (CONST_INT_P (size
)
5236 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
5237 emit_block_move (target
, temp
, size
,
5239 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5242 enum machine_mode pointer_mode
5243 = targetm
.addr_space
.pointer_mode (MEM_ADDR_SPACE (target
));
5244 enum machine_mode address_mode
= get_address_mode (target
);
5246 /* Compute the size of the data to copy from the string. */
5248 = size_binop_loc (loc
, MIN_EXPR
,
5249 make_tree (sizetype
, size
),
5250 size_int (TREE_STRING_LENGTH (exp
)));
5252 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
5254 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
5257 /* Copy that much. */
5258 copy_size_rtx
= convert_to_mode (pointer_mode
, copy_size_rtx
,
5259 TYPE_UNSIGNED (sizetype
));
5260 emit_block_move (target
, temp
, copy_size_rtx
,
5262 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5264 /* Figure out how much is left in TARGET that we have to clear.
5265 Do all calculations in pointer_mode. */
5266 if (CONST_INT_P (copy_size_rtx
))
5268 size
= plus_constant (address_mode
, size
,
5269 -INTVAL (copy_size_rtx
));
5270 target
= adjust_address (target
, BLKmode
,
5271 INTVAL (copy_size_rtx
));
5275 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
5276 copy_size_rtx
, NULL_RTX
, 0,
5279 if (GET_MODE (copy_size_rtx
) != address_mode
)
5280 copy_size_rtx
= convert_to_mode (address_mode
,
5282 TYPE_UNSIGNED (sizetype
));
5284 target
= offset_address (target
, copy_size_rtx
,
5285 highest_pow2_factor (copy_size
));
5286 label
= gen_label_rtx ();
5287 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
5288 GET_MODE (size
), 0, label
);
5291 if (size
!= const0_rtx
)
5292 clear_storage (target
, size
, BLOCK_OP_NORMAL
);
5298 /* Handle calls that return values in multiple non-contiguous locations.
5299 The Irix 6 ABI has examples of this. */
5300 else if (GET_CODE (target
) == PARALLEL
)
5301 emit_group_load (target
, temp
, TREE_TYPE (exp
),
5302 int_size_in_bytes (TREE_TYPE (exp
)));
5303 else if (GET_MODE (temp
) == BLKmode
)
5304 emit_block_move (target
, temp
, expr_size (exp
),
5306 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5307 else if (nontemporal
5308 && emit_storent_insn (target
, temp
))
5309 /* If we managed to emit a nontemporal store, there is nothing else to
5314 temp
= force_operand (temp
, target
);
5316 emit_move_insn (target
, temp
);
/* Return true if field F of structure TYPE is a flexible array.  */
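/* e.g. the C99 flexible array member DATA in

       struct packet { int len; char data[]; };

   is such a field: it is the last member, its array type has no upper
   bound, and the enclosing struct still has a known constant size.  */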
static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
          && TREE_CODE (tf) == ARRAY_TYPE
          && TYPE_DOMAIN (tf)
          && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
          && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
          && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
          && int_size_in_bytes (type) >= 0);
}
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
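/* For instance, for

       struct T { int a; short b[4]; };

   the !FOR_CTOR_P estimate would be 5 scalars (A plus the four elements
   of B), while FOR_CTOR_P asks how many top-level initializers a
   constructor for T must supply, here 2.  (Illustrative numbers only.)  */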
static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;
5355 nelts
= array_type_nelts (type
);
5356 if (nelts
&& host_integerp (nelts
, 1))
5358 unsigned HOST_WIDE_INT n
;
5360 n
= tree_low_cst (nelts
, 1) + 1;
5361 if (n
== 0 || for_ctor_p
)
5364 return n
* count_type_elements (TREE_TYPE (type
), false);
5366 return for_ctor_p
? -1 : 1;
5371 unsigned HOST_WIDE_INT n
;
5375 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5376 if (TREE_CODE (f
) == FIELD_DECL
)
5379 n
+= count_type_elements (TREE_TYPE (f
), false);
5380 else if (!flexible_array_member_p (f
, type
))
5381 /* Don't count flexible arrays, which are not supposed
5382 to be initialized. */
5390 case QUAL_UNION_TYPE
:
5395 gcc_assert (!for_ctor_p
);
5396 /* Estimate the number of scalars in each field and pick the
5397 maximum. Other estimates would do instead; the idea is simply
5398 to make sure that the estimate is not sensitive to the ordering
5401 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5402 if (TREE_CODE (f
) == FIELD_DECL
)
5404 m
= count_type_elements (TREE_TYPE (f
), false);
5405 /* If the field doesn't span the whole union, add an extra
5406 scalar for the rest. */
5407 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f
)),
5408 TYPE_SIZE (type
)) != 1)
5420 return TYPE_VECTOR_SUBPARTS (type
);
5424 case FIXED_POINT_TYPE
:
5429 case REFERENCE_TYPE
:
5445 /* Helper for categorize_ctor_elements. Identical interface. */
5448 categorize_ctor_elements_1 (const_tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
5449 HOST_WIDE_INT
*p_init_elts
, bool *p_complete
)
5451 unsigned HOST_WIDE_INT idx
;
5452 HOST_WIDE_INT nz_elts
, init_elts
, num_fields
;
5453 tree value
, purpose
, elt_type
;
5455 /* Whether CTOR is a valid constant initializer, in accordance with what
5456 initializer_constant_valid_p does. If inferred from the constructor
5457 elements, true until proven otherwise. */
5458 bool const_from_elts_p
= constructor_static_from_elts_p (ctor
);
5459 bool const_p
= const_from_elts_p
? true : TREE_STATIC (ctor
);
5464 elt_type
= NULL_TREE
;
5466 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), idx
, purpose
, value
)
5468 HOST_WIDE_INT mult
= 1;
5470 if (TREE_CODE (purpose
) == RANGE_EXPR
)
5472 tree lo_index
= TREE_OPERAND (purpose
, 0);
5473 tree hi_index
= TREE_OPERAND (purpose
, 1);
5475 if (host_integerp (lo_index
, 1) && host_integerp (hi_index
, 1))
5476 mult
= (tree_low_cst (hi_index
, 1)
5477 - tree_low_cst (lo_index
, 1) + 1);
5480 elt_type
= TREE_TYPE (value
);
5482 switch (TREE_CODE (value
))
5486 HOST_WIDE_INT nz
= 0, ic
= 0;
5488 bool const_elt_p
= categorize_ctor_elements_1 (value
, &nz
, &ic
,
5491 nz_elts
+= mult
* nz
;
5492 init_elts
+= mult
* ic
;
5494 if (const_from_elts_p
&& const_p
)
5495 const_p
= const_elt_p
;
5502 if (!initializer_zerop (value
))
5508 nz_elts
+= mult
* TREE_STRING_LENGTH (value
);
5509 init_elts
+= mult
* TREE_STRING_LENGTH (value
);
5513 if (!initializer_zerop (TREE_REALPART (value
)))
5515 if (!initializer_zerop (TREE_IMAGPART (value
)))
5523 for (i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
5525 tree v
= VECTOR_CST_ELT (value
, i
);
5526 if (!initializer_zerop (v
))
5535 HOST_WIDE_INT tc
= count_type_elements (elt_type
, false);
5536 nz_elts
+= mult
* tc
;
5537 init_elts
+= mult
* tc
;
5539 if (const_from_elts_p
&& const_p
)
5540 const_p
= initializer_constant_valid_p (value
, elt_type
)
5547 if (*p_complete
&& !complete_ctor_at_level_p (TREE_TYPE (ctor
),
5548 num_fields
, elt_type
))
5549 *p_complete
= false;
5551 *p_nz_elts
+= nz_elts
;
5552 *p_init_elts
+= init_elts
;
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_INIT_ELTS.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
                          HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
                          const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */
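/* e.g. the initializer

       int v[8] = { 0, 0, 0, 0, 0, 0, 7, 0 };

   is "mostly zeros" (one nonzero element out of eight), so callers will
   typically clear the whole object first and then store just the nonzero
   entries.  */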
static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
5654 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
5655 HOST_WIDE_INT bitpos
, enum machine_mode mode
,
5656 tree exp
, tree type
, int cleared
,
5657 alias_set_type alias_set
)
5659 if (TREE_CODE (exp
) == CONSTRUCTOR
5660 /* We can only call store_constructor recursively if the size and
5661 bit position are on a byte boundary. */
5662 && bitpos
% BITS_PER_UNIT
== 0
5663 && (bitsize
> 0 && bitsize
% BITS_PER_UNIT
== 0)
5664 /* If we have a nonzero bitpos for a register target, then we just
5665 let store_field do the bitfield handling. This is unlikely to
5666 generate unnecessary clear instructions anyways. */
5667 && (bitpos
== 0 || MEM_P (target
)))
5671 = adjust_address (target
,
5672 GET_MODE (target
) == BLKmode
5674 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
5675 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5678 /* Update the alias set, if required. */
5679 if (MEM_P (target
) && ! MEM_KEEP_ALIAS_SET_P (target
)
5680 && MEM_ALIAS_SET (target
) != 0)
5682 target
= copy_rtx (target
);
5683 set_mem_alias_set (target
, alias_set
);
5686 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
5689 store_field (target
, bitsize
, bitpos
, 0, 0, mode
, exp
, type
, alias_set
,
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
5702 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
5704 tree type
= TREE_TYPE (exp
);
5705 #ifdef WORD_REGISTER_OPERATIONS
5706 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
5709 switch (TREE_CODE (type
))
5713 case QUAL_UNION_TYPE
:
5715 unsigned HOST_WIDE_INT idx
;
5718 /* If size is zero or the target is already cleared, do nothing. */
5719 if (size
== 0 || cleared
)
5721 /* We either clear the aggregate or indicate the value is dead. */
5722 else if ((TREE_CODE (type
) == UNION_TYPE
5723 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
5724 && ! CONSTRUCTOR_ELTS (exp
))
5725 /* If the constructor is empty, clear the union. */
5727 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
5731 /* If we are building a static constructor into a register,
5732 set the initial value as zero so we can fold the value into
5733 a constant. But if more than one register is involved,
5734 this probably loses. */
5735 else if (REG_P (target
) && TREE_STATIC (exp
)
5736 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
5738 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5742 /* If the constructor has fewer fields than the structure or
5743 if we are initializing the structure to mostly zeros, clear
5744 the whole structure first. Don't do this if TARGET is a
5745 register whose mode size isn't equal to SIZE since
5746 clear_storage can't handle this case. */
5748 && (((int)VEC_length (constructor_elt
, CONSTRUCTOR_ELTS (exp
))
5749 != fields_length (type
))
5750 || mostly_zeros_p (exp
))
5752 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
5755 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
5759 if (REG_P (target
) && !cleared
)
5760 emit_clobber (target
);
5762 /* Store each element of the constructor into the
5763 corresponding field of TARGET. */
5764 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, field
, value
)
5766 enum machine_mode mode
;
5767 HOST_WIDE_INT bitsize
;
5768 HOST_WIDE_INT bitpos
= 0;
5770 rtx to_rtx
= target
;
5772 /* Just ignore missing fields. We cleared the whole
5773 structure, above, if any fields are missing. */
5777 if (cleared
&& initializer_zerop (value
))
5780 if (host_integerp (DECL_SIZE (field
), 1))
5781 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
5785 mode
= DECL_MODE (field
);
5786 if (DECL_BIT_FIELD (field
))
5789 offset
= DECL_FIELD_OFFSET (field
);
5790 if (host_integerp (offset
, 0)
5791 && host_integerp (bit_position (field
), 0))
5793 bitpos
= int_bit_position (field
);
5797 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
5801 enum machine_mode address_mode
;
5805 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
5806 make_tree (TREE_TYPE (exp
),
5809 offset_rtx
= expand_normal (offset
);
5810 gcc_assert (MEM_P (to_rtx
));
5812 address_mode
= get_address_mode (to_rtx
);
5813 if (GET_MODE (offset_rtx
) != address_mode
)
5814 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
5816 to_rtx
= offset_address (to_rtx
, offset_rtx
,
5817 highest_pow2_factor (offset
));
5820 #ifdef WORD_REGISTER_OPERATIONS
5821 /* If this initializes a field that is smaller than a
5822 word, at the start of a word, try to widen it to a full
5823 word. This special case allows us to output C++ member
5824 function initializations in a form that the optimizers
5827 && bitsize
< BITS_PER_WORD
5828 && bitpos
% BITS_PER_WORD
== 0
5829 && GET_MODE_CLASS (mode
) == MODE_INT
5830 && TREE_CODE (value
) == INTEGER_CST
5832 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
5834 tree type
= TREE_TYPE (value
);
5836 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
5838 type
= lang_hooks
.types
.type_for_mode
5839 (word_mode
, TYPE_UNSIGNED (type
));
5840 value
= fold_convert (type
, value
);
5843 if (BYTES_BIG_ENDIAN
)
5845 = fold_build2 (LSHIFT_EXPR
, type
, value
,
5846 build_int_cst (type
,
5847 BITS_PER_WORD
- bitsize
));
5848 bitsize
= BITS_PER_WORD
;
5853 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
5854 && DECL_NONADDRESSABLE_P (field
))
5856 to_rtx
= copy_rtx (to_rtx
);
5857 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
5860 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
5861 value
, type
, cleared
,
5862 get_alias_set (TREE_TYPE (field
)));
5869 unsigned HOST_WIDE_INT i
;
5872 tree elttype
= TREE_TYPE (type
);
5874 HOST_WIDE_INT minelt
= 0;
5875 HOST_WIDE_INT maxelt
= 0;
5877 domain
= TYPE_DOMAIN (type
);
5878 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
5879 && TYPE_MAX_VALUE (domain
)
5880 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
5881 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
5883 /* If we have constant bounds for the range of the type, get them. */
5886 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
5887 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
5890 /* If the constructor has fewer elements than the array, clear
5891 the whole array first. Similarly if this is static
5892 constructor of a non-BLKmode object. */
5895 else if (REG_P (target
) && TREE_STATIC (exp
))
5899 unsigned HOST_WIDE_INT idx
;
5901 HOST_WIDE_INT count
= 0, zero_count
= 0;
5902 need_to_clear
= ! const_bounds_p
;
5904 /* This loop is a more accurate version of the loop in
5905 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5906 is also needed to check for missing elements. */
5907 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, index
, value
)
5909 HOST_WIDE_INT this_node_count
;
5914 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
5916 tree lo_index
= TREE_OPERAND (index
, 0);
5917 tree hi_index
= TREE_OPERAND (index
, 1);
5919 if (! host_integerp (lo_index
, 1)
5920 || ! host_integerp (hi_index
, 1))
5926 this_node_count
= (tree_low_cst (hi_index
, 1)
5927 - tree_low_cst (lo_index
, 1) + 1);
5930 this_node_count
= 1;
5932 count
+= this_node_count
;
5933 if (mostly_zeros_p (value
))
5934 zero_count
+= this_node_count
;
5937 /* Clear the entire array first if there are any missing
5938 elements, or if the incidence of zero elements is >=
5941 && (count
< maxelt
- minelt
+ 1
5942 || 4 * zero_count
>= 3 * count
))
5946 if (need_to_clear
&& size
> 0)
5949 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5951 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
5955 if (!cleared
&& REG_P (target
))
5956 /* Inform later passes that the old value is dead. */
5957 emit_clobber (target
);
5959 /* Store each element of the constructor into the
5960 corresponding element of TARGET, determined by counting the
5962 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), i
, index
, value
)
5964 enum machine_mode mode
;
5965 HOST_WIDE_INT bitsize
;
5966 HOST_WIDE_INT bitpos
;
5967 rtx xtarget
= target
;
5969 if (cleared
&& initializer_zerop (value
))
5972 mode
= TYPE_MODE (elttype
);
5973 if (mode
== BLKmode
)
5974 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
5975 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
5978 bitsize
= GET_MODE_BITSIZE (mode
);
5980 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
5982 tree lo_index
= TREE_OPERAND (index
, 0);
5983 tree hi_index
= TREE_OPERAND (index
, 1);
5984 rtx index_r
, pos_rtx
;
5985 HOST_WIDE_INT lo
, hi
, count
;
5988 /* If the range is constant and "small", unroll the loop. */
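	      /* A RANGE_EXPR index comes from a GNU designated-range
		 initializer, e.g.

		     int a[10] = { [2 ... 5] = 7 };

		 When the bounds are constant and the range is small the
		 stores are simply unrolled; otherwise a runtime loop is
		 built further below.  */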
5990 && host_integerp (lo_index
, 0)
5991 && host_integerp (hi_index
, 0)
5992 && (lo
= tree_low_cst (lo_index
, 0),
5993 hi
= tree_low_cst (hi_index
, 0),
5994 count
= hi
- lo
+ 1,
5997 || (host_integerp (TYPE_SIZE (elttype
), 1)
5998 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
6001 lo
-= minelt
; hi
-= minelt
;
6002 for (; lo
<= hi
; lo
++)
6004 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
6007 && !MEM_KEEP_ALIAS_SET_P (target
)
6008 && TREE_CODE (type
) == ARRAY_TYPE
6009 && TYPE_NONALIASED_COMPONENT (type
))
6011 target
= copy_rtx (target
);
6012 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6015 store_constructor_field
6016 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
6017 get_alias_set (elttype
));
6022 rtx loop_start
= gen_label_rtx ();
6023 rtx loop_end
= gen_label_rtx ();
6026 expand_normal (hi_index
);
6028 index
= build_decl (EXPR_LOCATION (exp
),
6029 VAR_DECL
, NULL_TREE
, domain
);
6030 index_r
= gen_reg_rtx (promote_decl_mode (index
, NULL
));
6031 SET_DECL_RTL (index
, index_r
);
6032 store_expr (lo_index
, index_r
, 0, false);
6034 /* Build the head of the loop. */
6035 do_pending_stack_adjust ();
6036 emit_label (loop_start
);
6038 /* Assign value to element index. */
6040 fold_convert (ssizetype
,
6041 fold_build2 (MINUS_EXPR
,
6044 TYPE_MIN_VALUE (domain
)));
6047 size_binop (MULT_EXPR
, position
,
6048 fold_convert (ssizetype
,
6049 TYPE_SIZE_UNIT (elttype
)));
6051 pos_rtx
= expand_normal (position
);
6052 xtarget
= offset_address (target
, pos_rtx
,
6053 highest_pow2_factor (position
));
6054 xtarget
= adjust_address (xtarget
, mode
, 0);
6055 if (TREE_CODE (value
) == CONSTRUCTOR
)
6056 store_constructor (value
, xtarget
, cleared
,
6057 bitsize
/ BITS_PER_UNIT
);
6059 store_expr (value
, xtarget
, 0, false);
6061 /* Generate a conditional jump to exit the loop. */
6062 exit_cond
= build2 (LT_EXPR
, integer_type_node
,
6064 jumpif (exit_cond
, loop_end
, -1);
6066 /* Update the loop counter, and jump to the head of
6068 expand_assignment (index
,
6069 build2 (PLUS_EXPR
, TREE_TYPE (index
),
6070 index
, integer_one_node
),
6073 emit_jump (loop_start
);
6075 /* Build the end of the loop. */
6076 emit_label (loop_end
);
6079 else if ((index
!= 0 && ! host_integerp (index
, 0))
6080 || ! host_integerp (TYPE_SIZE (elttype
), 1))
6085 index
= ssize_int (1);
6088 index
= fold_convert (ssizetype
,
6089 fold_build2 (MINUS_EXPR
,
6092 TYPE_MIN_VALUE (domain
)));
6095 size_binop (MULT_EXPR
, index
,
6096 fold_convert (ssizetype
,
6097 TYPE_SIZE_UNIT (elttype
)));
6098 xtarget
= offset_address (target
,
6099 expand_normal (position
),
6100 highest_pow2_factor (position
));
6101 xtarget
= adjust_address (xtarget
, mode
, 0);
6102 store_expr (value
, xtarget
, 0, false);
6107 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
6108 * tree_low_cst (TYPE_SIZE (elttype
), 1));
6110 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
6112 if (MEM_P (target
) && !MEM_KEEP_ALIAS_SET_P (target
)
6113 && TREE_CODE (type
) == ARRAY_TYPE
6114 && TYPE_NONALIASED_COMPONENT (type
))
6116 target
= copy_rtx (target
);
6117 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6119 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
6120 type
, cleared
, get_alias_set (elttype
));
6128 unsigned HOST_WIDE_INT idx
;
6129 constructor_elt
*ce
;
6133 tree elttype
= TREE_TYPE (type
);
6134 int elt_size
= tree_low_cst (TYPE_SIZE (elttype
), 1);
6135 enum machine_mode eltmode
= TYPE_MODE (elttype
);
6136 HOST_WIDE_INT bitsize
;
6137 HOST_WIDE_INT bitpos
;
6138 rtvec vector
= NULL
;
6140 alias_set_type alias
;
6142 gcc_assert (eltmode
!= BLKmode
);
6144 n_elts
= TYPE_VECTOR_SUBPARTS (type
);
6145 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
6147 enum machine_mode mode
= GET_MODE (target
);
6149 icode
= (int) optab_handler (vec_init_optab
, mode
);
6150 if (icode
!= CODE_FOR_nothing
)
6154 vector
= rtvec_alloc (n_elts
);
6155 for (i
= 0; i
< n_elts
; i
++)
6156 RTVEC_ELT (vector
, i
) = CONST0_RTX (GET_MODE_INNER (mode
));
6160 /* If the constructor has fewer elements than the vector,
6161 clear the whole array first. Similarly if this is static
6162 constructor of a non-BLKmode object. */
6165 else if (REG_P (target
) && TREE_STATIC (exp
))
6169 unsigned HOST_WIDE_INT count
= 0, zero_count
= 0;
6172 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
6174 int n_elts_here
= tree_low_cst
6175 (int_const_binop (TRUNC_DIV_EXPR
,
6176 TYPE_SIZE (TREE_TYPE (value
)),
6177 TYPE_SIZE (elttype
)), 1);
6179 count
+= n_elts_here
;
6180 if (mostly_zeros_p (value
))
6181 zero_count
+= n_elts_here
;
6184 /* Clear the entire vector first if there are any missing elements,
6185 or if the incidence of zero elements is >= 75%. */
6186 need_to_clear
= (count
< n_elts
|| 4 * zero_count
>= 3 * count
);
6189 if (need_to_clear
&& size
> 0 && !vector
)
6192 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6194 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6198 /* Inform later passes that the old value is dead. */
6199 if (!cleared
&& !vector
&& REG_P (target
))
6200 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6203 alias
= MEM_ALIAS_SET (target
);
6205 alias
= get_alias_set (elttype
);
6207 /* Store each element of the constructor into the corresponding
6208 element of TARGET, determined by counting the elements. */
6209 for (idx
= 0, i
= 0;
6210 VEC_iterate (constructor_elt
, CONSTRUCTOR_ELTS (exp
), idx
, ce
);
6211 idx
++, i
+= bitsize
/ elt_size
)
6213 HOST_WIDE_INT eltpos
;
6214 tree value
= ce
->value
;
6216 bitsize
= tree_low_cst (TYPE_SIZE (TREE_TYPE (value
)), 1);
6217 if (cleared
&& initializer_zerop (value
))
6221 eltpos
= tree_low_cst (ce
->index
, 1);
6227 /* Vector CONSTRUCTORs should only be built from smaller
6228 vectors in the case of BLKmode vectors. */
6229 gcc_assert (TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
);
6230 RTVEC_ELT (vector
, eltpos
)
6231 = expand_normal (value
);
6235 enum machine_mode value_mode
=
6236 TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
6237 ? TYPE_MODE (TREE_TYPE (value
))
6239 bitpos
= eltpos
* elt_size
;
6240 store_constructor_field (target
, bitsize
, bitpos
,
6241 value_mode
, value
, type
,
6247 emit_insn (GEN_FCN (icode
)
6249 gen_rtx_PARALLEL (GET_MODE (target
), vector
)));
6258 /* Store the value of EXP (an expression tree)
6259 into a subfield of TARGET which has mode MODE and occupies
6260 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6261 If MODE is VOIDmode, it means that we are storing into a bit-field.
6263 BITREGION_START is bitpos of the first bitfield in this region.
6264 BITREGION_END is the bitpos of the ending bitfield in this region.
6265 These two fields are 0, if the C++ memory model does not apply,
6266 or we are not interested in keeping track of bitfield regions.
6268 Always return const0_rtx unless we have something particular to
6271 TYPE is the type of the underlying object,
6273 ALIAS_SET is the alias set for the destination. This value will
6274 (in general) be different from that for TARGET, since TARGET is a
6275 reference to the containing structure.
6277 If NONTEMPORAL is true, try generating a nontemporal store. */
6280 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6281 unsigned HOST_WIDE_INT bitregion_start,
6282 unsigned HOST_WIDE_INT bitregion_end,
6283 enum machine_mode mode, tree exp, tree type,
6284 alias_set_type alias_set, bool nontemporal)
6286 if (TREE_CODE (exp
) == ERROR_MARK
)
6289 /* If we have nothing to store, do nothing unless the expression has
6292 return expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6294 /* If we are storing into an unaligned field of an aligned union that is
6295 in a register, we may have the mode of TARGET being an integer mode but
6296 MODE == BLKmode. In that case, get an aligned object whose size and
6297 alignment are the same as TARGET and store TARGET into it (we can avoid
6298 the store if the field being stored is the entire width of TARGET). Then
6299 call ourselves recursively to store the field into a BLKmode version of
6300 that object. Finally, load from the object into TARGET. This is not
6301 very efficient in general, but should only be slightly more expensive
6302 than the otherwise-required unaligned accesses. Perhaps this can be
6303 cleaned up later. It's tempting to make OBJECT readonly, but it's set
6304 twice, once with emit_move_insn and once via store_field. */
6307 && (REG_P (target
) || GET_CODE (target
) == SUBREG
))
6309 rtx object
= assign_temp (type
, 1, 1);
6310 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
6312 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
6313 emit_move_insn (object
, target
);
6315 store_field (blk_object
, bitsize
, bitpos
,
6316 bitregion_start
, bitregion_end
,
6317 mode
, exp
, type
, MEM_ALIAS_SET (blk_object
), nontemporal
);
6319 emit_move_insn (target
, object
);
6321 /* We want to return the BLKmode version of the data. */
6325 if (GET_CODE (target
) == CONCAT
)
6327 /* We're storing into a struct containing a single __complex. */
6329 gcc_assert (!bitpos
);
6330 return store_expr (exp
, target
, 0, nontemporal
);
6333 /* If the structure is in a register or if the component
6334 is a bit field, we cannot use addressing to access it.
6335 Use bit-field techniques or SUBREG to store in it. */
6337 if (mode
== VOIDmode
6338 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
6339 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6340 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6342 || GET_CODE (target
) == SUBREG
6343 /* If the field isn't aligned enough to store as an ordinary memref,
6344 store it as a bit field. */
6346 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
6347 || bitpos
% GET_MODE_ALIGNMENT (mode
))
6348 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
6349 || (bitpos
% BITS_PER_UNIT
!= 0)))
6350 || (bitsize
>= 0 && mode
!= BLKmode
6351 && GET_MODE_BITSIZE (mode
) > bitsize
)
6352 /* If the RHS and field are a constant size and the size of the
6353 RHS isn't the same size as the bitfield, we must use bitfield
6356 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
6357 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0)
6358 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6359 decl we must use bitfield operations. */
6361 && TREE_CODE (exp
) == MEM_REF
6362 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
6363 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
6364 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp
, 0),0 ))
6365 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) != BLKmode
))
6370 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6371 implies a mask operation. If the precision is the same size as
6372 the field we're storing into, that mask is redundant. This is
6373 particularly common with bit field assignments generated by the
6375 nop_def
= get_def_for_expr (exp
, NOP_EXPR
);
6378 tree type
= TREE_TYPE (exp
);
6379 if (INTEGRAL_TYPE_P (type
)
6380 && TYPE_PRECISION (type
) < GET_MODE_BITSIZE (TYPE_MODE (type
))
6381 && bitsize
== TYPE_PRECISION (type
))
6383 tree op
= gimple_assign_rhs1 (nop_def
);
6384 type
= TREE_TYPE (op
);
6385 if (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) >= bitsize
)
6390 temp
= expand_normal (exp
);
6392 /* If BITSIZE is narrower than the size of the type of EXP
6393 we will be narrowing TEMP. Normally, what's wanted are the
6394 low-order bits. However, if EXP's type is a record and this is
6395 big-endian machine, we want the upper BITSIZE bits. */
6396 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
6397 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
6398 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
6399 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
6400 GET_MODE_BITSIZE (GET_MODE (temp
)) - bitsize
,
6403 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6404 if (mode
!= VOIDmode
&& mode
!= BLKmode
6405 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
6406 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
6408 /* If the modes of TEMP and TARGET are both BLKmode, both
6409 must be in memory and BITPOS must be aligned on a byte
6410 boundary. If so, we simply do a block copy. Likewise
6411 for a BLKmode-like TARGET. */
6412 if (GET_MODE (temp
) == BLKmode
6413 && (GET_MODE (target
) == BLKmode
6415 && GET_MODE_CLASS (GET_MODE (target
)) == MODE_INT
6416 && (bitpos
% BITS_PER_UNIT
) == 0
6417 && (bitsize
% BITS_PER_UNIT
) == 0)))
6419 gcc_assert (MEM_P (target
) && MEM_P (temp
)
6420 && (bitpos
% BITS_PER_UNIT
) == 0);
6422 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
6423 emit_block_move (target
, temp
,
6424 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6431 /* Store the value in the bitfield. */
6432 store_bit_field (target
, bitsize
, bitpos
,
6433 bitregion_start
, bitregion_end
,
6440 /* Now build a reference to just the desired component. */
6441 rtx to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
6443 if (to_rtx
== target
)
6444 to_rtx
= copy_rtx (to_rtx
);
6446 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
6447 set_mem_alias_set (to_rtx
, alias_set
);
6449 return store_expr (exp
, to_rtx
, 0, nontemporal
);
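/* Illustrative sketch (not part of the original source): a caller that
   wants to store the value of a tree RHS into a 9-bit field starting at
   bit 3 of a structure already expanded to TO_RTX might invoke
   store_field roughly as below.  TO_RTX, RHS and STRUCT_TYPE are
   hypothetical names; the two bitregion arguments are 0 because no C++
   memory-model region is being tracked, and VOIDmode requests the
   bit-field path described in the comment above the function.

     store_field (to_rtx, 9, 3, 0, 0, VOIDmode, rhs, struct_type,
                  get_alias_set (struct_type), false);
*/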
6453 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6454 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6455 codes and find the ultimate containing object, which we return.
6457 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6458 bit position, and *PUNSIGNEDP to the signedness of the field.
6459 If the position of the field is variable, we store a tree
6460 giving the variable offset (in units) in *POFFSET.
6461 This offset is in addition to the bit position.
6462 If the position is not variable, we store 0 in *POFFSET.
6464 If any of the extraction expressions is volatile,
6465 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6467 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6468 Otherwise, it is a mode that can be used to access the field.
6470 If the field describes a variable-sized object, *PMODE is set to
6471 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6472 this case, but the address of the object can be found.
6474 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6475 look through nodes that serve as markers of a greater alignment than
6476 the one that can be deduced from the expression. These nodes make it
6477 possible for front-ends to prevent temporaries from being created by
6478 the middle-end on alignment considerations. For that purpose, the
6479 normal operating mode at high-level is to always pass FALSE so that
6480 the ultimate containing object is really returned; moreover, the
6481 associated predicate handled_component_p will always return TRUE
6482 on these nodes, thus indicating that they are essentially handled
6483 by get_inner_reference. TRUE should only be passed when the caller
6484 is scanning the expression in order to build another representation
6485 and specifically knows how to handle these nodes; as such, this is
6486 the normal operating mode in the RTL expanders. */
6489 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6490 HOST_WIDE_INT *pbitpos, tree *poffset,
6491 enum machine_mode *pmode, int *punsignedp,
6492 int *pvolatilep, bool keep_aligning)
6495 enum machine_mode mode
= VOIDmode
;
6496 bool blkmode_bitfield
= false;
6497 tree offset
= size_zero_node
;
6498 double_int bit_offset
= double_int_zero
;
6500 /* First get the mode, signedness, and size. We do this from just the
6501 outermost expression. */
6503 if (TREE_CODE (exp
) == COMPONENT_REF
)
6505 tree field
= TREE_OPERAND (exp
, 1);
6506 size_tree
= DECL_SIZE (field
);
6507 if (!DECL_BIT_FIELD (field
))
6508 mode
= DECL_MODE (field
);
6509 else if (DECL_MODE (field
) == BLKmode
)
6510 blkmode_bitfield
= true;
6511 else if (TREE_THIS_VOLATILE (exp
)
6512 && flag_strict_volatile_bitfields
> 0)
6513 /* Volatile bitfields should be accessed in the mode of the
6514 field's type, not the mode computed based on the bit
6516 mode
= TYPE_MODE (DECL_BIT_FIELD_TYPE (field
));
6518 *punsignedp
= DECL_UNSIGNED (field
);
6520 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
6522 size_tree
= TREE_OPERAND (exp
, 1);
6523 *punsignedp
= (! INTEGRAL_TYPE_P (TREE_TYPE (exp
))
6524 || TYPE_UNSIGNED (TREE_TYPE (exp
)));
6526 /* For vector types, with the correct size of access, use the mode of
6528 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == VECTOR_TYPE
6529 && TREE_TYPE (exp
) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
6530 && tree_int_cst_equal (size_tree
, TYPE_SIZE (TREE_TYPE (exp
))))
6531 mode
= TYPE_MODE (TREE_TYPE (exp
));
6535 mode
= TYPE_MODE (TREE_TYPE (exp
));
6536 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
6538 if (mode
== BLKmode
)
6539 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
6541 *pbitsize
= GET_MODE_BITSIZE (mode
);
6546 if (! host_integerp (size_tree
, 1))
6547 mode
= BLKmode
, *pbitsize
= -1;
6549 *pbitsize
= tree_low_cst (size_tree
, 1);
6552 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6553 and find the ultimate containing object. */
6556 switch (TREE_CODE (exp
))
6560 = double_int_add (bit_offset
,
6561 tree_to_double_int (TREE_OPERAND (exp
, 2)));
6566 tree field
= TREE_OPERAND (exp
, 1);
6567 tree this_offset
= component_ref_field_offset (exp
);
6569 /* If this field hasn't been filled in yet, don't go past it.
6570 This should only happen when folding expressions made during
6571 type construction. */
6572 if (this_offset
== 0)
6575 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
6576 bit_offset
= double_int_add (bit_offset
,
6578 (DECL_FIELD_BIT_OFFSET (field
)));
6580 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6585 case ARRAY_RANGE_REF
:
6587 tree index
= TREE_OPERAND (exp
, 1);
6588 tree low_bound
= array_ref_low_bound (exp
);
6589 tree unit_size
= array_ref_element_size (exp
);
6591 /* We assume all arrays have sizes that are a multiple of a byte.
6592 First subtract the lower bound, if any, in the type of the
6593 index, then convert to sizetype and multiply by the size of
6594 the array element. */
6595 if (! integer_zerop (low_bound
))
6596 index
= fold_build2 (MINUS_EXPR
, TREE_TYPE (index
),
6599 offset
= size_binop (PLUS_EXPR
, offset
,
6600 size_binop (MULT_EXPR
,
6601 fold_convert (sizetype
, index
),
6610 bit_offset
= double_int_add (bit_offset
,
6611 uhwi_to_double_int (*pbitsize
));
6614 case VIEW_CONVERT_EXPR
:
6615 if (keep_aligning
&& STRICT_ALIGNMENT
6616 && (TYPE_ALIGN (TREE_TYPE (exp
))
6617 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6618 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
6619 < BIGGEST_ALIGNMENT
)
6620 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
6621 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6626 /* Hand back the decl for MEM[&decl, off]. */
6627 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
6629 tree off
= TREE_OPERAND (exp
, 1);
6630 if (!integer_zerop (off
))
6632 double_int boff
, coff
= mem_ref_offset (exp
);
6633 boff
= double_int_lshift (coff
,
6635 ? 3 : exact_log2 (BITS_PER_UNIT
),
6636 HOST_BITS_PER_DOUBLE_INT
, true);
6637 bit_offset
= double_int_add (bit_offset
, boff
);
6639 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6647 /* If any reference in the chain is volatile, the effect is volatile. */
6648 if (TREE_THIS_VOLATILE (exp
))
6651 exp
= TREE_OPERAND (exp
, 0);
6655 /* If OFFSET is constant, see if we can return the whole thing as a
6656 constant bit position. Make sure to handle overflow during
6658 if (TREE_CODE (offset
) == INTEGER_CST
)
6660 double_int tem
= tree_to_double_int (offset
);
6661 tem
= double_int_sext (tem
, TYPE_PRECISION (sizetype
));
6662 tem
= double_int_lshift (tem
,
6664 ? 3 : exact_log2 (BITS_PER_UNIT
),
6665 HOST_BITS_PER_DOUBLE_INT
, true);
6666 tem
= double_int_add (tem
, bit_offset
);
6667 if (double_int_fits_in_shwi_p (tem
))
6669 *pbitpos
= double_int_to_shwi (tem
);
6670 *poffset
= offset
= NULL_TREE
;
6674 /* Otherwise, split it up. */
6677 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6678 if (double_int_negative_p (bit_offset
))
6681 = double_int_mask (BITS_PER_UNIT
== 8
6682 ? 3 : exact_log2 (BITS_PER_UNIT
));
6683 double_int tem
= double_int_and_not (bit_offset
, mask
);
6684 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6685 Subtract it to BIT_OFFSET and add it (scaled) to OFFSET. */
6686 bit_offset
= double_int_sub (bit_offset
, tem
);
6687 tem
= double_int_rshift (tem
,
6689 ? 3 : exact_log2 (BITS_PER_UNIT
),
6690 HOST_BITS_PER_DOUBLE_INT
, true);
6691 offset
= size_binop (PLUS_EXPR
, offset
,
6692 double_int_to_tree (sizetype
, tem
));
6695 *pbitpos
= double_int_to_shwi (bit_offset
);
6699 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6700 if (mode
== VOIDmode
6702 && (*pbitpos
% BITS_PER_UNIT
) == 0
6703 && (*pbitsize
% BITS_PER_UNIT
) == 0)
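/* Illustrative sketch (not part of the original source): the usual
   calling pattern for get_inner_reference decomposes a reference EXP
   into its base object plus position and size information.  All local
   names below are hypothetical.

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep,
                                      true);

   Passing TRUE for KEEP_ALIGNING corresponds to the "normal operating
   mode in the RTL expanders" described in the comment above.  */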
6711 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6712 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6713 EXP is marked as PACKED. */
6716 contains_packed_reference (const_tree exp)
6718 bool packed_p = false;
6722 switch (TREE_CODE (exp))
6726 tree field = TREE_OPERAND (exp, 1);
6727 packed_p = DECL_PACKED (field)
6728 || TYPE_PACKED (TREE_TYPE (field))
6729 || TYPE_PACKED (TREE_TYPE (exp));
6737 case ARRAY_RANGE_REF:
6740 case VIEW_CONVERT_EXPR:
6746 exp = TREE_OPERAND (exp, 0);
6752 /* Return a tree of sizetype representing the size, in bytes, of the element
6753 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6756 array_ref_element_size (tree exp)
6758 tree aligned_size = TREE_OPERAND (exp, 3);
6759 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6760 location_t loc = EXPR_LOCATION (exp);
6762 /* If a size was specified in the ARRAY_REF, it's the size measured
6763 in alignment units of the element type. So multiply by that value. */
6766 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6767 sizetype from another type of the same width and signedness. */
6768 if (TREE_TYPE (aligned_size) != sizetype)
6769 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6770 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6771 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6774 /* Otherwise, take the size from that of the element type. Substitute
6775 any PLACEHOLDER_EXPR that we have. */
6777 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6780 /* Return a tree representing the lower bound of the array mentioned in
6781 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6784 array_ref_low_bound (tree exp)
6786 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6788 /* If a lower bound is specified in EXP, use it. */
6789 if (TREE_OPERAND (exp, 2))
6790 return TREE_OPERAND (exp, 2);
6792 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6793 substituting for a PLACEHOLDER_EXPR as needed. */
6794 if (domain_type && TYPE_MIN_VALUE (domain_type))
6795 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6797 /* Otherwise, return a zero of the appropriate type. */
6798 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
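/* Illustrative sketch (not part of the original source): for an
   ARRAY_REF node AREF (a hypothetical name), the two helpers above
   combine to give the byte offset of the referenced element, mirroring
   the ARRAY_REF handling in get_inner_reference earlier in this file.

     tree index = TREE_OPERAND (aref, 1);
     tree low   = array_ref_low_bound (aref);
     tree esize = array_ref_element_size (aref);
     tree off;
     if (! integer_zerop (low))
       index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low);
     off = size_binop (MULT_EXPR, fold_convert (sizetype, index),
                       fold_convert (sizetype, esize));
*/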
6801 /* Returns true if REF is an array reference to an array at the end of
6802 a structure. If this is the case, the array may be allocated larger
6803 than its upper bound implies. */
6806 array_at_struct_end_p (tree ref)
6808 if (TREE_CODE (ref) != ARRAY_REF
6809 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6812 while (handled_component_p (ref))
6814 /* If the reference chain contains a component reference to a
6815 non-union type and there follows another field the reference
6816 is not at the end of a structure. */
6817 if (TREE_CODE (ref) == COMPONENT_REF
6818 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6820 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6821 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6822 nextf = DECL_CHAIN (nextf);
6827 ref = TREE_OPERAND (ref, 0);
6830 /* If the reference is based on a declared entity, the size of the array
6831 is constrained by its given domain. */
6838 /* Return a tree representing the upper bound of the array mentioned in
6839 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6842 array_ref_up_bound (tree exp)
6844 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6846 /* If there is a domain type and it has an upper bound, use it, substituting
6847 for a PLACEHOLDER_EXPR as needed. */
6848 if (domain_type && TYPE_MAX_VALUE (domain_type))
6849 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6851 /* Otherwise fail. */
6855 /* Return a tree representing the offset, in bytes, of the field referenced
6856 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6859 component_ref_field_offset (tree exp)
6861 tree aligned_offset = TREE_OPERAND (exp, 2);
6862 tree field = TREE_OPERAND (exp, 1);
6863 location_t loc = EXPR_LOCATION (exp);
6865 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6866 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6870 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6871 sizetype from another type of the same width and signedness. */
6872 if (TREE_TYPE (aligned_offset) != sizetype)
6873 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6874 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6875 size_int (DECL_OFFSET_ALIGN (field)
6879 /* Otherwise, take the offset from that of the field. Substitute
6880 any PLACEHOLDER_EXPR that we have. */
6882 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
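/* Illustrative sketch (not part of the original source): combining
   component_ref_field_offset with DECL_FIELD_BIT_OFFSET gives the full
   position of a COMPONENT_REF's field, which is what get_inner_reference
   does above.  CREF is a hypothetical COMPONENT_REF node.

     tree field    = TREE_OPERAND (cref, 1);
     tree byte_off = component_ref_field_offset (cref);
     tree bit_off  = DECL_FIELD_BIT_OFFSET (field);

   The total bit position is byte_off * BITS_PER_UNIT + bit_off.  */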
6885 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6887 static unsigned HOST_WIDE_INT
6888 target_align (const_tree target)
6890 /* We might have a chain of nested references with intermediate misaligning
6891 bitfields components, so need to recurse to find out. */
6893 unsigned HOST_WIDE_INT this_align, outer_align;
6895 switch (TREE_CODE (target))
6901 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6902 outer_align = target_align (TREE_OPERAND (target, 0));
6903 return MIN (this_align, outer_align);
6906 case ARRAY_RANGE_REF:
6907 this_align = TYPE_ALIGN (TREE_TYPE (target));
6908 outer_align = target_align (TREE_OPERAND (target, 0));
6909 return MIN (this_align, outer_align);
6912 case NON_LVALUE_EXPR:
6913 case VIEW_CONVERT_EXPR:
6914 this_align = TYPE_ALIGN (TREE_TYPE (target));
6915 outer_align = target_align (TREE_OPERAND (target, 0));
6916 return MAX (this_align, outer_align);
6919 return TYPE_ALIGN (TREE_TYPE (target));
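/* Illustrative sketch (not part of the original source): for a store
   target written as a chain of references, e.g. the tree for x.a[i].b,
   the recursion above walks the COMPONENT_REFs and ARRAY_REFs and
   returns the minimum alignment along the chain, in bits:

     unsigned HOST_WIDE_INT align = target_align (to);

   TO is a hypothetical tree for the left-hand side; callers such as
   highest_pow2_factor_for_target below convert the result to bytes by
   dividing by BITS_PER_UNIT.  */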
6924 /* Given an rtx VALUE that may contain additions and multiplications, return
6925 an equivalent value that just refers to a register, memory, or constant.
6926 This is done by generating instructions to perform the arithmetic and
6927 returning a pseudo-register containing the value.
6929 The returned value may be a REG, SUBREG, MEM or constant. */
6932 force_operand (rtx value
, rtx target
)
6935 /* Use subtarget as the target for operand 0 of a binary operation. */
6936 rtx subtarget
= get_subtarget (target
);
6937 enum rtx_code code
= GET_CODE (value
);
6939 /* Check for subreg applied to an expression produced by loop optimizer. */
6941 && !REG_P (SUBREG_REG (value
))
6942 && !MEM_P (SUBREG_REG (value
)))
6945 = simplify_gen_subreg (GET_MODE (value
),
6946 force_reg (GET_MODE (SUBREG_REG (value
)),
6947 force_operand (SUBREG_REG (value
),
6949 GET_MODE (SUBREG_REG (value
)),
6950 SUBREG_BYTE (value
));
6951 code
= GET_CODE (value
);
6954 /* Check for a PIC address load. */
6955 if ((code
== PLUS
|| code
== MINUS
)
6956 && XEXP (value
, 0) == pic_offset_table_rtx
6957 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
6958 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
6959 || GET_CODE (XEXP (value
, 1)) == CONST
))
6962 subtarget
= gen_reg_rtx (GET_MODE (value
));
6963 emit_move_insn (subtarget
, value
);
6967 if (ARITHMETIC_P (value
))
6969 op2
= XEXP (value
, 1);
6970 if (!CONSTANT_P (op2
) && !(REG_P (op2
) && op2
!= subtarget
))
6972 if (code
== MINUS
&& CONST_INT_P (op2
))
6975 op2
= negate_rtx (GET_MODE (value
), op2
);
6978 /* Check for an addition with OP2 a constant integer and our first
6979 operand a PLUS of a virtual register and something else. In that
6980 case, we want to emit the sum of the virtual register and the
6981 constant first and then add the other value. This allows virtual
6982 register instantiation to simply modify the constant rather than
6983 creating another one around this addition. */
6984 if (code
== PLUS
&& CONST_INT_P (op2
)
6985 && GET_CODE (XEXP (value
, 0)) == PLUS
6986 && REG_P (XEXP (XEXP (value
, 0), 0))
6987 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6988 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
6990 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
6991 XEXP (XEXP (value
, 0), 0), op2
,
6992 subtarget
, 0, OPTAB_LIB_WIDEN
);
6993 return expand_simple_binop (GET_MODE (value
), code
, temp
,
6994 force_operand (XEXP (XEXP (value
,
6996 target
, 0, OPTAB_LIB_WIDEN
);
6999 op1
= force_operand (XEXP (value
, 0), subtarget
);
7000 op2
= force_operand (op2
, NULL_RTX
);
7004 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
7006 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
7007 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7008 target
, 1, OPTAB_LIB_WIDEN
);
7010 return expand_divmod (0,
7011 FLOAT_MODE_P (GET_MODE (value
))
7012 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
7013 GET_MODE (value
), op1
, op2
, target
, 0);
7015 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7018 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
7021 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7024 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7025 target
, 0, OPTAB_LIB_WIDEN
);
7027 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7028 target
, 1, OPTAB_LIB_WIDEN
);
7031 if (UNARY_P (value
))
7034 target
= gen_reg_rtx (GET_MODE (value
));
7035 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
7042 case FLOAT_TRUNCATE
:
7043 convert_move (target
, op1
, code
== ZERO_EXTEND
);
7048 expand_fix (target
, op1
, code
== UNSIGNED_FIX
);
7052 case UNSIGNED_FLOAT
:
7053 expand_float (target
, op1
, code
== UNSIGNED_FLOAT
);
7057 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
7061 #ifdef INSN_SCHEDULING
7062 /* On machines that have insn scheduling, we want all memory reference to be
7063 explicit, so we need to deal with such paradoxical SUBREGs. */
7064 if (paradoxical_subreg_p (value
) && MEM_P (SUBREG_REG (value
)))
7066 = simplify_gen_subreg (GET_MODE (value
),
7067 force_reg (GET_MODE (SUBREG_REG (value
)),
7068 force_operand (SUBREG_REG (value
),
7070 GET_MODE (SUBREG_REG (value
)),
7071 SUBREG_BYTE (value
));
7077 /* Subroutine of expand_expr: return nonzero iff there is no way that
7078 EXP can reference X, which is being modified. TOP_P is nonzero if this
7079 call is going to be used to determine whether we need a temporary
7080 for EXP, as opposed to a recursive call to this function.
7082 It is always safe for this routine to return zero since it merely
7083 searches for optimization opportunities. */
7086 safe_from_p (const_rtx x
, tree exp
, int top_p
)
7092 /* If EXP has varying size, we MUST use a target since we currently
7093 have no way of allocating temporaries of variable size
7094 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7095 So we assume here that something at a higher level has prevented a
7096 clash. This is somewhat bogus, but the best we can do. Only
7097 do this when X is BLKmode and when we are at the top level. */
7098 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
7099 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
7100 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
7101 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
7102 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
7104 && GET_MODE (x
) == BLKmode
)
7105 /* If X is in the outgoing argument area, it is always safe. */
7107 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
7108 || (GET_CODE (XEXP (x
, 0)) == PLUS
7109 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
7112 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7113 find the underlying pseudo. */
7114 if (GET_CODE (x
) == SUBREG
)
7117 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7121 /* Now look at our tree code and possibly recurse. */
7122 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
7124 case tcc_declaration
:
7125 exp_rtl
= DECL_RTL_IF_SET (exp
);
7131 case tcc_exceptional
:
7132 if (TREE_CODE (exp
) == TREE_LIST
)
7136 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
7138 exp
= TREE_CHAIN (exp
);
7141 if (TREE_CODE (exp
) != TREE_LIST
)
7142 return safe_from_p (x
, exp
, 0);
7145 else if (TREE_CODE (exp
) == CONSTRUCTOR
)
7147 constructor_elt
*ce
;
7148 unsigned HOST_WIDE_INT idx
;
7150 FOR_EACH_VEC_ELT (constructor_elt
, CONSTRUCTOR_ELTS (exp
), idx
, ce
)
7151 if ((ce
->index
!= NULL_TREE
&& !safe_from_p (x
, ce
->index
, 0))
7152 || !safe_from_p (x
, ce
->value
, 0))
7156 else if (TREE_CODE (exp
) == ERROR_MARK
)
7157 return 1; /* An already-visited SAVE_EXPR? */
7162 /* The only case we look at here is the DECL_INITIAL inside a
7164 return (TREE_CODE (exp
) != DECL_EXPR
7165 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
7166 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
7167 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
7170 case tcc_comparison
:
7171 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
7176 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7178 case tcc_expression
:
7181 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7182 the expression. If it is set, we conflict iff we are that rtx or
7183 both are in memory. Otherwise, we check all operands of the
7184 expression recursively. */
7186 switch (TREE_CODE (exp
))
7189 /* If the operand is static or we are static, we can't conflict.
7190 Likewise if we don't conflict with the operand at all. */
7191 if (staticp (TREE_OPERAND (exp
, 0))
7192 || TREE_STATIC (exp
)
7193 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
7196 /* Otherwise, the only way this can conflict is if we are taking
7197 the address of a DECL whose address is part of X, which is
7199 exp = TREE_OPERAND (exp, 0);
7202 if (!DECL_RTL_SET_P (exp
)
7203 || !MEM_P (DECL_RTL (exp
)))
7206 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
7212 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
7213 get_alias_set (exp
)))
7218 /* Assume that the call will clobber all hard registers and
7220 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7225 case WITH_CLEANUP_EXPR
:
7226 case CLEANUP_POINT_EXPR
:
7227 /* Lowered by gimplify.c. */
7231 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7237 /* If we have an rtx, we do not need to scan our operands. */
7241 nops
= TREE_OPERAND_LENGTH (exp
);
7242 for (i
= 0; i
< nops
; i
++)
7243 if (TREE_OPERAND (exp
, i
) != 0
7244 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
7250 /* Should never get a type here. */
7254 /* If we have an rtl, find any enclosed object. Then see if we conflict
7258 if (GET_CODE (exp_rtl
) == SUBREG
)
7260 exp_rtl
= SUBREG_REG (exp_rtl
);
7262 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
7266 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7267 are memory and they conflict. */
7268 return ! (rtx_equal_p (x
, exp_rtl
)
7269 || (MEM_P (x
) && MEM_P (exp_rtl
)
7270 && true_dependence (exp_rtl
, VOIDmode
, x
)));
7273 /* If we reach here, it is safe. */
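/* Illustrative sketch (not part of the original source): the typical
   use of safe_from_p is to decide whether a suggested TARGET may be
   reused while another operand is still unexpanded, as expand_operands
   does further down:

     if (target != 0 && ! safe_from_p (target, exp1, 1))
       target = 0;

   i.e. drop the suggested target rather than risk clobbering a value
   that EXP1 still needs.  */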
7278 /* Return the highest power of two that EXP is known to be a multiple of.
7279 This is used in updating alignment of MEMs in array references. */
7281 unsigned HOST_WIDE_INT
7282 highest_pow2_factor (const_tree exp)
7284 unsigned HOST_WIDE_INT c0, c1;
7286 switch (TREE_CODE (exp))
7289 /* We can find the lowest bit that's a one. If the low
7290 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7291 We need to handle this case since we can find it in a COND_EXPR,
7292 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
7293 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7295 if (TREE_OVERFLOW (exp))
7296 return BIGGEST_ALIGNMENT;
7299 /* Note: tree_low_cst is intentionally not used here,
7300 we don't care about the upper bits. */
7301 c0 = TREE_INT_CST_LOW (exp);
7303 return c0 ? c0 : BIGGEST_ALIGNMENT;
7307 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
7308 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7309 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7310 return MIN (c0, c1);
7313 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7314 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7317 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
7319 if (integer_pow2p (TREE_OPERAND (exp, 1))
7320 && host_integerp (TREE_OPERAND (exp, 1), 1))
7322 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7323 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7324 return MAX (1, c0 / c1);
7329 /* The highest power of two of a bit-and expression is the maximum of
7330 that of its operands. We typically get here for a complex LHS and
7331 a constant negative power of two on the RHS to force an explicit
7332 alignment, so don't bother looking at the LHS. */
7333 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7337 return highest_pow2_factor (TREE_OPERAND (exp, 0));
7340 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7343 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7344 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7345 return MIN (c0, c1);
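/* Illustrative sketch (not part of the original source): for an offset
   expression such as i * 4 the function above returns 4, and for
   i * 4 + 16 it returns MIN (4, 16) = 4, i.e. the largest power of two
   known to divide every value of the expression.  The expanders use it
   to tell offset_address how well-aligned a computed address is, as in
   the store_constructor code earlier in this file:

     xtarget = offset_address (target, pos_rtx,
                               highest_pow2_factor (position));
*/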
7354 /* Similar, except that the alignment requirements of TARGET are
7355 taken into account. Assume it is at least as aligned as its
7356 type, unless it is a COMPONENT_REF in which case the layout of
7357 the structure gives the alignment. */
7359 static unsigned HOST_WIDE_INT
7360 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7362 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7363 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7365 return MAX (factor, talign);
7368 #ifdef HAVE_conditional_move
7369 /* Convert the tree comparison code TCODE to the rtl one where the
7370 signedness is UNSIGNEDP. */
7372 static enum rtx_code
7373 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7385 code = unsignedp ? LTU : LT;
7388 code = unsignedp ? LEU : LE;
7391 code = unsignedp ? GTU : GT;
7394 code = unsignedp ? GEU : GE;
7396 case UNORDERED_EXPR:
7428 /* Subroutine of expand_expr. Expand the two operands of a binary
7429 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7430 The value may be stored in TARGET if TARGET is nonzero. The
7431 MODIFIER argument is as documented by expand_expr. */
7434 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7435 enum expand_modifier modifier)
7437 if (! safe_from_p (target, exp1, 1))
7439 if (operand_equal_p (exp0, exp1, 0))
7441 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7442 *op1 = copy_rtx (*op0);
7446 /* If we need to preserve evaluation order, copy exp0 into its own
7447 temporary variable so that it can't be clobbered by exp1. */
7448 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7449 exp0 = save_expr (exp0);
7450 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7451 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
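/* Illustrative sketch (not part of the original source): a typical
   binary-operation expansion calls the helper above and then hands the
   two rtx operands to the optabs layer, e.g. for a PLUS_EXPR:

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
                      EXPAND_NORMAL);
     return expand_binop (mode, add_optab, op0, op1, target,
                          unsignedp, OPTAB_LIB_WIDEN);

   This mirrors the pattern used throughout expand_expr_real_2 below.  */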
7456 /* Return a MEM that contains constant EXP. DEFER is as for
7457 output_constant_def and MODIFIER is as for expand_expr. */
7460 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7464 mem = output_constant_def (exp, defer);
7465 if (modifier != EXPAND_INITIALIZER)
7466 mem = use_anchored_address (mem);
7470 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7471 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7474 expand_expr_addr_expr_1 (tree exp
, rtx target
, enum machine_mode tmode
,
7475 enum expand_modifier modifier
, addr_space_t as
)
7477 rtx result
, subtarget
;
7479 HOST_WIDE_INT bitsize
, bitpos
;
7480 int volatilep
, unsignedp
;
7481 enum machine_mode mode1
;
7483 /* If we are taking the address of a constant and are at the top level,
7484 we have to use output_constant_def since we can't call force_const_mem
7486 /* ??? This should be considered a front-end bug. We should not be
7487 generating ADDR_EXPR of something that isn't an LVALUE. The only
7488 exception here is STRING_CST. */
7489 if (CONSTANT_CLASS_P (exp
))
7491 result
= XEXP (expand_expr_constant (exp
, 0, modifier
), 0);
7492 if (modifier
< EXPAND_SUM
)
7493 result
= force_operand (result
, target
);
7497 /* Everything must be something allowed by is_gimple_addressable. */
7498 switch (TREE_CODE (exp
))
7501 /* This case will happen via recursion for &a->b. */
7502 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7506 tree tem
= TREE_OPERAND (exp
, 0);
7507 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
7508 tem
= fold_build_pointer_plus (tem
, TREE_OPERAND (exp
, 1));
7509 return expand_expr (tem
, target
, tmode
, modifier
);
7513 /* Expand the initializer like constants above. */
7514 result
= XEXP (expand_expr_constant (DECL_INITIAL (exp
),
7516 if (modifier
< EXPAND_SUM
)
7517 result
= force_operand (result
, target
);
7521 /* The real part of the complex number is always first, therefore
7522 the address is the same as the address of the parent object. */
7525 inner
= TREE_OPERAND (exp
, 0);
7529 /* The imaginary part of the complex number is always second.
7530 The expression is therefore always offset by the size of the
7533 bitpos
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp
)));
7534 inner
= TREE_OPERAND (exp
, 0);
7538 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7539 expand_expr, as that can have various side effects; LABEL_DECLs for
7540 example, may not have their DECL_RTL set yet. Expand the rtl of
7541 CONSTRUCTORs too, which should yield a memory reference for the
7542 constructor's contents. Assume language specific tree nodes can
7543 be expanded in some interesting way. */
7544 gcc_assert (TREE_CODE (exp
) < LAST_AND_UNUSED_TREE_CODE
);
7546 || TREE_CODE (exp
) == CONSTRUCTOR
7547 || TREE_CODE (exp
) == COMPOUND_LITERAL_EXPR
)
7549 result
= expand_expr (exp
, target
, tmode
,
7550 modifier
== EXPAND_INITIALIZER
7551 ? EXPAND_INITIALIZER
: EXPAND_CONST_ADDRESS
);
7553 /* If the DECL isn't in memory, then the DECL wasn't properly
7554 marked TREE_ADDRESSABLE, which will be either a front-end
7555 or a tree optimizer bug. */
7557 if (TREE_ADDRESSABLE (exp
)
7559 && ! targetm
.calls
.allocate_stack_slots_for_args())
7561 error ("local frame unavailable (naked function?)");
7565 gcc_assert (MEM_P (result
));
7566 result
= XEXP (result
, 0);
7568 /* ??? Is this needed anymore? */
7570 TREE_USED (exp
) = 1;
7572 if (modifier
!= EXPAND_INITIALIZER
7573 && modifier
!= EXPAND_CONST_ADDRESS
7574 && modifier
!= EXPAND_SUM
)
7575 result
= force_operand (result
, target
);
7579 /* Pass FALSE as the last argument to get_inner_reference although
7580 we are expanding to RTL. The rationale is that we know how to
7581 handle "aligning nodes" here: we can just bypass them because
7582 they won't change the final object whose address will be returned
7583 (they actually exist only for that purpose). */
7584 inner
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7585 &mode1
, &unsignedp
, &volatilep
, false);
7589 /* We must have made progress. */
7590 gcc_assert (inner
!= exp
);
7592 subtarget
= offset
|| bitpos
? NULL_RTX
: target
;
7593 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7594 inner alignment, force the inner to be sufficiently aligned. */
7595 if (CONSTANT_CLASS_P (inner
)
7596 && TYPE_ALIGN (TREE_TYPE (inner
)) < TYPE_ALIGN (TREE_TYPE (exp
)))
7598 inner
= copy_node (inner
);
7599 TREE_TYPE (inner
) = copy_node (TREE_TYPE (inner
));
7600 TYPE_ALIGN (TREE_TYPE (inner
)) = TYPE_ALIGN (TREE_TYPE (exp
));
7601 TYPE_USER_ALIGN (TREE_TYPE (inner
)) = 1;
7603 result
= expand_expr_addr_expr_1 (inner
, subtarget
, tmode
, modifier
, as
);
7609 if (modifier
!= EXPAND_NORMAL
)
7610 result
= force_operand (result
, NULL
);
7611 tmp
= expand_expr (offset
, NULL_RTX
, tmode
,
7612 modifier
== EXPAND_INITIALIZER
7613 ? EXPAND_INITIALIZER
: EXPAND_NORMAL
);
7615 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7616 tmp
= convert_memory_address_addr_space (tmode
, tmp
, as
);
7618 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7619 result
= simplify_gen_binary (PLUS
, tmode
, result
, tmp
);
7622 subtarget
= bitpos
? NULL_RTX
: target
;
7623 result
= expand_simple_binop (tmode
, PLUS
, result
, tmp
, subtarget
,
7624 1, OPTAB_LIB_WIDEN
);
7630 /* Someone beforehand should have rejected taking the address
7631 of such an object. */
7632 gcc_assert ((bitpos
% BITS_PER_UNIT
) == 0);
7634 result
= plus_constant (tmode
, result
, bitpos
/ BITS_PER_UNIT
);
7635 if (modifier
< EXPAND_SUM
)
7636 result
= force_operand (result
, target
);
7642 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7643 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7646 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7647 enum expand_modifier modifier)
7649 addr_space_t as = ADDR_SPACE_GENERIC;
7650 enum machine_mode address_mode = Pmode;
7651 enum machine_mode pointer_mode = ptr_mode;
7652 enum machine_mode rmode;
7655 /* Target mode of VOIDmode says "whatever's natural". */
7656 if (tmode == VOIDmode)
7657 tmode = TYPE_MODE (TREE_TYPE (exp));
7659 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7661 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7662 address_mode = targetm.addr_space.address_mode (as);
7663 pointer_mode = targetm.addr_space.pointer_mode (as);
7666 /* We can get called with some Weird Things if the user does silliness
7667 like "(short) &a". In that case, convert_memory_address won't do
7668 the right thing, so ignore the given target mode. */
7669 if (tmode != address_mode && tmode != pointer_mode)
7670 tmode = address_mode;
7672 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7673 tmode, modifier, as);
7675 /* Despite expand_expr claims concerning ignoring TMODE when not
7676 strictly convenient, stuff breaks if we don't honor it. Note
7677 that combined with the above, we only do this for pointer modes. */
7678 rmode = GET_MODE (result);
7679 if (rmode == VOIDmode)
7682 result = convert_memory_address_addr_space (tmode, result, as);
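/* Illustrative sketch (not part of the original source): taking the
   address of an object, as in the C expression &x, reaches this
   function via expand_expr_real_1 with EXP being the ADDR_EXPR, and
   the call looks roughly like

     rtx addr = expand_expr_addr_expr (addr_expr, target, tmode,
                                       modifier);

   ADDR_EXPR here names a hypothetical ADDR_EXPR tree node; the result
   is an rtx in the pointer mode of the decl's address space.  */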
7687 /* Generate code for computing CONSTRUCTOR EXP.
7688 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7689 is TRUE, instead of creating a temporary variable in memory
7690 NULL is returned and the caller needs to handle it differently. */
7693 expand_constructor (tree exp
, rtx target
, enum expand_modifier modifier
,
7694 bool avoid_temp_mem
)
7696 tree type
= TREE_TYPE (exp
);
7697 enum machine_mode mode
= TYPE_MODE (type
);
7699 /* Try to avoid creating a temporary at all. This is possible
7700 if all of the initializer is zero.
7701 FIXME: try to handle all [0..255] initializers we can handle
7703 if (TREE_STATIC (exp
)
7704 && !TREE_ADDRESSABLE (exp
)
7705 && target
!= 0 && mode
== BLKmode
7706 && all_zeros_p (exp
))
7708 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
7712 /* All elts simple constants => refer to a constant in memory. But
7713 if this is a non-BLKmode mode, let it store a field at a time
7714 since that should make a CONST_INT or CONST_DOUBLE when we
7715 fold. Likewise, if we have a target we can use, it is best to
7716 store directly into the target unless the type is large enough
7717 that memcpy will be used. If we are making an initializer and
7718 all operands are constant, put it in memory as well.
7720 FIXME: Avoid trying to fill vector constructors piece-meal.
7721 Output them with output_constant_def below unless we're sure
7722 they're zeros. This should go away when vector initializers
7723 are treated like VECTOR_CST instead of arrays. */
7724 if ((TREE_STATIC (exp
)
7725 && ((mode
== BLKmode
7726 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
7727 || TREE_ADDRESSABLE (exp
)
7728 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
7729 && (! MOVE_BY_PIECES_P
7730 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
7732 && ! mostly_zeros_p (exp
))))
7733 || ((modifier
== EXPAND_INITIALIZER
|| modifier
== EXPAND_CONST_ADDRESS
)
7734 && TREE_CONSTANT (exp
)))
7741 constructor
= expand_expr_constant (exp
, 1, modifier
);
7743 if (modifier
!= EXPAND_CONST_ADDRESS
7744 && modifier
!= EXPAND_INITIALIZER
7745 && modifier
!= EXPAND_SUM
)
7746 constructor
= validize_mem (constructor
);
7751 /* Handle calls that pass values in multiple non-contiguous
7752 locations. The Irix 6 ABI has examples of this. */
7753 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
7754 || GET_CODE (target
) == PARALLEL
|| modifier
== EXPAND_STACK_PARM
)
7760 = assign_temp (build_qualified_type (type
, (TYPE_QUALS (type
)
7761 | (TREE_READONLY (exp
)
7762 * TYPE_QUAL_CONST
))),
7763 TREE_ADDRESSABLE (exp
), 1);
7766 store_constructor (exp
, target
, 0, int_expr_size (exp
));
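/* Illustrative sketch (not part of the original source): expanding an
   aggregate initializer such as  struct s v = { 1, 2 };  ends up here
   with EXP being the CONSTRUCTOR; a caller in expand_expr_real_1 does
   roughly

     temp = expand_constructor (exp, target, modifier, false);

   and, when it instead passes TRUE for AVOID_TEMP_MEM and gets NULL
   back, arranges its own memory temporary, as described in the comment
   above the function.  */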
7771 /* expand_expr: generate code for computing expression EXP.
7772 An rtx for the computed value is returned. The value is never null.
7773 In the case of a void EXP, const0_rtx is returned.
7775 The value may be stored in TARGET if TARGET is nonzero.
7776 TARGET is just a suggestion; callers must assume that
7777 the rtx returned may not be the same as TARGET.
7779 If TARGET is CONST0_RTX, it means that the value will be ignored.
7781 If TMODE is not VOIDmode, it suggests generating the
7782 result in mode TMODE. But this is done only when convenient.
7783 Otherwise, TMODE is ignored and the value generated in its natural mode.
7784 TMODE is just a suggestion; callers must assume that
7785 the rtx returned may not have mode TMODE.
7787 Note that TARGET may have neither TMODE nor MODE. In that case, it
7788 probably will not be used.
7790 If MODIFIER is EXPAND_SUM then when EXP is an addition
7791 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7792 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7793 products as above, or REG or MEM, or constant.
7794 Ordinarily in such cases we would output mul or add instructions
7795 and then return a pseudo reg containing the sum.
7797 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7798 it also marks a label as absolutely required (it can't be dead).
7799 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7800 This is used for outputting expressions used in initializers.
7802 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7803 with a constant address even if that address is not normally legitimate.
7804 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7806 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7807 a call parameter. Such targets require special care as we haven't yet
7808 marked TARGET so that it's safe from being trashed by libcalls. We
7809 don't want to use TARGET for anything but the final result;
7810 Intermediate values must go elsewhere. Additionally, calls to
7811 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7813 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7814 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7815 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7816 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7820 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7821 enum expand_modifier modifier, rtx *alt_rtl)
7825 /* Handle ERROR_MARK before anybody tries to access its type. */
7826 if (TREE_CODE (exp
) == ERROR_MARK
7827 || (TREE_CODE (TREE_TYPE (exp
)) == ERROR_MARK
))
7829 ret
= CONST0_RTX (tmode
);
7830 return ret
? ret
: const0_rtx
;
7833 /* If this is an expression of some kind and it has an associated line
7834 number, then emit the line number before expanding the expression.
7836 We need to save and restore the file and line information so that
7837 errors discovered during expansion are emitted with the right
7838 information. It would be better if the diagnostic routines
7839 used the file/line information embedded in the tree nodes rather
7841 if (cfun
&& EXPR_HAS_LOCATION (exp
))
7843 location_t saved_location
= input_location
;
7844 location_t saved_curr_loc
= get_curr_insn_source_location ();
7845 tree saved_block
= get_curr_insn_block ();
7846 input_location
= EXPR_LOCATION (exp
);
7847 set_curr_insn_source_location (input_location
);
7849 /* Record where the insns produced belong. */
7850 set_curr_insn_block (TREE_BLOCK (exp
));
7852 ret
= expand_expr_real_1 (exp
, target
, tmode
, modifier
, alt_rtl
);
7854 input_location
= saved_location
;
7855 set_curr_insn_block (saved_block
);
7856 set_curr_insn_source_location (saved_curr_loc
);
7860 ret
= expand_expr_real_1 (exp
, target
, tmode
, modifier
, alt_rtl
);
7866 /* Try to expand the conditional expression which is represented by
7867 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7868 return the rtl reg which represents the result. Otherwise return
7872 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7873 tree treeop1 ATTRIBUTE_UNUSED,
7874 tree treeop2 ATTRIBUTE_UNUSED)
7876 #ifdef HAVE_conditional_move
7878 rtx op00
, op01
, op1
, op2
;
7879 enum rtx_code comparison_code
;
7880 enum machine_mode comparison_mode
;
7883 tree type
= TREE_TYPE (treeop1
);
7884 int unsignedp
= TYPE_UNSIGNED (type
);
7885 enum machine_mode mode
= TYPE_MODE (type
);
7887 temp
= assign_temp (type
, 0, 1);
7889 /* If we cannot do a conditional move on the mode, try doing it
7890 with the promoted mode. */
7891 if (!can_conditionally_move_p (mode
))
7892 mode
= promote_mode (type
, mode
, &unsignedp
);
7894 if (!can_conditionally_move_p (mode
))
7898 expand_operands (treeop1
, treeop2
,
7899 temp
, &op1
, &op2
, EXPAND_NORMAL
);
7901 if (TREE_CODE (treeop0
) == SSA_NAME
7902 && (srcstmt
= get_def_for_expr_class (treeop0
, tcc_comparison
)))
7904 tree type
= TREE_TYPE (gimple_assign_rhs1 (srcstmt
));
7905 enum tree_code cmpcode
= gimple_assign_rhs_code (srcstmt
);
7906 op00
= expand_normal (gimple_assign_rhs1 (srcstmt
));
7907 op01
= expand_normal (gimple_assign_rhs2 (srcstmt
));
7908 comparison_mode
= TYPE_MODE (type
);
7909 unsignedp
= TYPE_UNSIGNED (type
);
7910 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
7912 else if (TREE_CODE_CLASS (TREE_CODE (treeop0
)) == tcc_comparison
)
7914 tree type
= TREE_TYPE (TREE_OPERAND (treeop0
, 0));
7915 enum tree_code cmpcode
= TREE_CODE (treeop0
);
7916 op00
= expand_normal (TREE_OPERAND (treeop0
, 0));
7917 op01
= expand_normal (TREE_OPERAND (treeop0
, 1));
7918 unsignedp
= TYPE_UNSIGNED (type
);
7919 comparison_mode
= TYPE_MODE (type
);
7920 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
7924 op00
= expand_normal (treeop0
);
7926 comparison_code
= NE
;
7927 comparison_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
7930 if (GET_MODE (op1
) != mode
)
7931 op1
= gen_lowpart (mode
, op1
);
7933 if (GET_MODE (op2
) != mode
)
7934 op2
= gen_lowpart (mode
, op2
);
7936 /* Try to emit the conditional move. */
7937 insn
= emit_conditional_move (temp
, comparison_code
,
7938 op00
, op01
, comparison_mode
,
7942 /* If we could do the conditional move, emit the sequence,
7946 rtx seq
= get_insns ();
7952 /* Otherwise discard the sequence and fall back to code with
7960 expand_expr_real_2 (sepops ops
, rtx target
, enum machine_mode tmode
,
7961 enum expand_modifier modifier
)
7963 rtx op0
, op1
, op2
, temp
;
7966 enum machine_mode mode
;
7967 enum tree_code code
= ops
->code
;
7969 rtx subtarget
, original_target
;
7971 bool reduce_bit_field
;
7972 location_t loc
= ops
->location
;
7973 tree treeop0
, treeop1
, treeop2
;
7974 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7975 ? reduce_to_bit_field_precision ((expr), \
7981 mode
= TYPE_MODE (type
);
7982 unsignedp
= TYPE_UNSIGNED (type
);
7988 /* We should be called only on simple (binary or unary) expressions,
7989 exactly those that are valid in gimple expressions that aren't
7990 GIMPLE_SINGLE_RHS (or invalid). */
7991 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
7992 || get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
7993 || get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
);
7995 ignore
= (target
== const0_rtx
7996 || ((CONVERT_EXPR_CODE_P (code
)
7997 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
7998 && TREE_CODE (type
) == VOID_TYPE
));
8000 /* We should be called only if we need the result. */
8001 gcc_assert (!ignore
);
8003 /* An operation in what may be a bit-field type needs the
8004 result to be reduced to the precision of the bit-field type,
8005 which is narrower than that of the type's mode. */
8006 reduce_bit_field
= (INTEGRAL_TYPE_P (type
)
8007 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
8009 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
8012 /* Use subtarget as the target for operand 0 of a binary operation. */
8013 subtarget
= get_subtarget (target
);
8014 original_target
= target
;
8018 case NON_LVALUE_EXPR
:
8021 if (treeop0
== error_mark_node
)
8024 if (TREE_CODE (type
) == UNION_TYPE
)
8026 tree valtype
= TREE_TYPE (treeop0
);
8028 /* If both input and output are BLKmode, this conversion isn't doing
8029 anything except possibly changing memory attribute. */
8030 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
8032 rtx result
= expand_expr (treeop0
, target
, tmode
,
8035 result
= copy_rtx (result
);
8036 set_mem_attributes (result
, type
, 0);
8042 if (TYPE_MODE (type
) != BLKmode
)
8043 target
= gen_reg_rtx (TYPE_MODE (type
));
8045 target
= assign_temp (type
, 1, 1);
8049 /* Store data into beginning of memory target. */
8050 store_expr (treeop0
,
8051 adjust_address (target
, TYPE_MODE (valtype
), 0),
8052 modifier
== EXPAND_STACK_PARM
,
8057 gcc_assert (REG_P (target
));
8059 /* Store this field into a union of the proper type. */
8060 store_field (target
,
8061 MIN ((int_size_in_bytes (TREE_TYPE
8064 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
8065 0, 0, 0, TYPE_MODE (valtype
), treeop0
,
8069 /* Return the entire union. */
8073 if (mode
== TYPE_MODE (TREE_TYPE (treeop0
)))
8075 op0
= expand_expr (treeop0
, target
, VOIDmode
,
8078 /* If the signedness of the conversion differs and OP0 is
8079 a promoted SUBREG, clear that indication since we now
8080 have to do the proper extension. */
8081 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)) != unsignedp
8082 && GET_CODE (op0
) == SUBREG
)
8083 SUBREG_PROMOTED_VAR_P (op0
) = 0;
8085 return REDUCE_BIT_FIELD (op0
);
8088 op0
= expand_expr (treeop0
, NULL_RTX
, mode
,
8089 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
);
8090 if (GET_MODE (op0
) == mode
)
8093 /* If OP0 is a constant, just convert it into the proper mode. */
8094 else if (CONSTANT_P (op0
))
8096 tree inner_type
= TREE_TYPE (treeop0
);
8097 enum machine_mode inner_mode
= GET_MODE (op0
);
8099 if (inner_mode
== VOIDmode
)
8100 inner_mode
= TYPE_MODE (inner_type
);
8102 if (modifier
== EXPAND_INITIALIZER
)
8103 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
8104 subreg_lowpart_offset (mode
,
8107 op0
= convert_modes (mode
, inner_mode
, op0
,
8108 TYPE_UNSIGNED (inner_type
));
8111 else if (modifier
== EXPAND_INITIALIZER
)
8112 op0
= gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
8114 else if (target
== 0)
8115 op0
= convert_to_mode (mode
, op0
,
8116 TYPE_UNSIGNED (TREE_TYPE
8120 convert_move (target
, op0
,
8121 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8125 return REDUCE_BIT_FIELD (op0
);
8127 case ADDR_SPACE_CONVERT_EXPR
:
8129 tree treeop0_type
= TREE_TYPE (treeop0
);
8131 addr_space_t as_from
;
8133 gcc_assert (POINTER_TYPE_P (type
));
8134 gcc_assert (POINTER_TYPE_P (treeop0_type
));
8136 as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
8137 as_from
= TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type
));
8139 /* Conversions between pointers to the same address space should
8140 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8141 gcc_assert (as_to
!= as_from
);
8143 /* Ask target code to handle conversion between pointers
8144 to overlapping address spaces. */
8145 if (targetm
.addr_space
.subset_p (as_to
, as_from
)
8146 || targetm
.addr_space
.subset_p (as_from
, as_to
))
8148 op0
= expand_expr (treeop0
, NULL_RTX
, VOIDmode
, modifier
);
8149 op0
= targetm
.addr_space
.convert (op0
, treeop0_type
, type
);
8154 /* For disjoint address spaces, converting anything but
8155 a null pointer invokes undefined behaviour. We simply
8156 always return a null pointer here. */
8157 return CONST0_RTX (mode
);
8160 case POINTER_PLUS_EXPR
:
8161 /* Even though the sizetype mode and the pointer's mode can be different
8162 expand is able to handle this correctly and get the correct result out
8163 of the PLUS_EXPR code. */
8164 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8165 if sizetype precision is smaller than pointer precision. */
8166 if (TYPE_PRECISION (sizetype
) < TYPE_PRECISION (type
))
8167 treeop1
= fold_convert_loc (loc
, type
,
8168 fold_convert_loc (loc
, ssizetype
,
8170 /* If sizetype precision is larger than pointer precision, truncate the
8171 offset to have matching modes. */
8172 else if (TYPE_PRECISION (sizetype
) > TYPE_PRECISION (type
))
8173 treeop1
= fold_convert_loc (loc
, type
, treeop1
);
8176 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8177 something else, make sure we add the register to the constant and
8178 then to the other thing. This case can occur during strength
8179 reduction and doing it this way will produce better code if the
8180 frame pointer or argument pointer is eliminated.
8182 fold-const.c will ensure that the constant is always in the inner
8183 PLUS_EXPR, so the only case we need to do anything about is if
8184 sp, ap, or fp is our second argument, in which case we must swap
8185 the innermost first argument and our second argument. */
8187 if (TREE_CODE (treeop0
) == PLUS_EXPR
8188 && TREE_CODE (TREE_OPERAND (treeop0
, 1)) == INTEGER_CST
8189 && TREE_CODE (treeop1
) == VAR_DECL
8190 && (DECL_RTL (treeop1
) == frame_pointer_rtx
8191 || DECL_RTL (treeop1
) == stack_pointer_rtx
8192 || DECL_RTL (treeop1
) == arg_pointer_rtx
))
8197 /* If the result is to be ptr_mode and we are adding an integer to
8198 something, we might be forming a constant. So try to use
8199 plus_constant. If it produces a sum and we can't accept it,
8200 use force_operand. This allows P = &ARR[const] to generate
8201 efficient code on machines where a SYMBOL_REF is not a valid
8204 If this is an EXPAND_SUM call, always return the sum. */
8205 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
8206 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
8208 if (modifier
== EXPAND_STACK_PARM
)
8210 if (TREE_CODE (treeop0
) == INTEGER_CST
8211 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8212 && TREE_CONSTANT (treeop1
))
8216 op1
= expand_expr (treeop1
, subtarget
, VOIDmode
,
8218 /* Use immed_double_const to ensure that the constant is
8219 truncated according to the mode of OP1, then sign extended
8220 to a HOST_WIDE_INT. Using the constant directly can result
8221 in non-canonical RTL in a 64x32 cross compile. */
8223 = immed_double_const (TREE_INT_CST_LOW (treeop0
),
8225 TYPE_MODE (TREE_TYPE (treeop1
)));
8226 op1
= plus_constant (mode
, op1
, INTVAL (constant_part
));
8227 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8228 op1
= force_operand (op1
, target
);
8229 return REDUCE_BIT_FIELD (op1
);
8232 else if (TREE_CODE (treeop1
) == INTEGER_CST
8233 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8234 && TREE_CONSTANT (treeop0
))
8238 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8239 (modifier
== EXPAND_INITIALIZER
8240 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
8241 if (! CONSTANT_P (op0
))
8243 op1
= expand_expr (treeop1
, NULL_RTX
,
8244 VOIDmode
, modifier
);
8245 /* Return a PLUS if modifier says it's OK. */
8246 if (modifier
== EXPAND_SUM
8247 || modifier
== EXPAND_INITIALIZER
)
8248 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
8251 /* Use immed_double_const to ensure that the constant is
8252 truncated according to the mode of OP1, then sign extended
8253 to a HOST_WIDE_INT. Using the constant directly can result
8254 in non-canonical RTL in a 64x32 cross compile. */
8256 = immed_double_const (TREE_INT_CST_LOW (treeop1
),
8258 TYPE_MODE (TREE_TYPE (treeop0
)));
8259 op0
= plus_constant (mode
, op0
, INTVAL (constant_part
));
8260 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8261 op0
= force_operand (op0
, target
);
8262 return REDUCE_BIT_FIELD (op0
);
8266 /* Use TER to expand pointer addition of a negated value
8267 as pointer subtraction. */
8268 if ((POINTER_TYPE_P (TREE_TYPE (treeop0
))
8269 || (TREE_CODE (TREE_TYPE (treeop0
)) == VECTOR_TYPE
8270 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0
)))))
8271 && TREE_CODE (treeop1
) == SSA_NAME
8272 && TYPE_MODE (TREE_TYPE (treeop0
))
8273 == TYPE_MODE (TREE_TYPE (treeop1
)))
8275 gimple def
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8278 treeop1
= gimple_assign_rhs1 (def
);
8284 /* No sense saving up arithmetic to be done
8285 if it's all in the wrong mode to form part of an address.
8286 And force_operand won't know whether to sign-extend or
8288 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8289 || mode
!= ptr_mode
)
8291 expand_operands (treeop0
, treeop1
,
8292 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8293 if (op0
== const0_rtx
)
8295 if (op1
== const0_rtx
)
8300 expand_operands (treeop0
, treeop1
,
8301 subtarget
, &op0
, &op1
, modifier
);
8302 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8306 /* For initializers, we are allowed to return a MINUS of two
8307 symbolic constants. Here we handle all cases when both operands
8309 /* Handle difference of two symbolic constants,
8310 for the sake of an initializer. */
8311 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8312 && really_constant_p (treeop0
)
8313 && really_constant_p (treeop1
))
8315 expand_operands (treeop0
, treeop1
,
8316 NULL_RTX
, &op0
, &op1
, modifier
);
8318 /* If the last operand is a CONST_INT, use plus_constant of
8319 the negated constant. Else make the MINUS. */
8320 if (CONST_INT_P (op1
))
8321 return REDUCE_BIT_FIELD (plus_constant (mode
, op0
,
8324 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode
, op0
, op1
));
8327 /* No sense saving up arithmetic to be done
8328 if it's all in the wrong mode to form part of an address.
8329 And force_operand won't know whether to sign-extend or
8331 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8332 || mode
!= ptr_mode
)
8335 expand_operands (treeop0
, treeop1
,
8336 subtarget
, &op0
, &op1
, modifier
);
8338 /* Convert A - const to A + (-const). */
8339 if (CONST_INT_P (op1
))
8341 op1
= negate_rtx (mode
, op1
);
8342 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8347 case WIDEN_MULT_PLUS_EXPR
:
8348 case WIDEN_MULT_MINUS_EXPR
:
8349 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8350 op2
= expand_normal (treeop2
);
8351 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
8355 case WIDEN_MULT_EXPR
:
8356 /* If first operand is constant, swap them.
8357 Thus the following special case checks need only
8358 check the second operand. */
8359 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8366 /* First, check if we have a multiplication of one signed and one
8367 unsigned operand. */
8368 if (TREE_CODE (treeop1
) != INTEGER_CST
8369 && (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
8370 != TYPE_UNSIGNED (TREE_TYPE (treeop1
))))
8372 enum machine_mode innermode
= TYPE_MODE (TREE_TYPE (treeop0
));
8373 this_optab
= usmul_widen_optab
;
8374 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8375 != CODE_FOR_nothing
)
8377 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8378 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8381 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op1
, &op0
,
8386 /* Check for a multiplication with matching signedness. */
8387 else if ((TREE_CODE (treeop1
) == INTEGER_CST
8388 && int_fits_type_p (treeop1
, TREE_TYPE (treeop0
)))
8389 || (TYPE_UNSIGNED (TREE_TYPE (treeop1
))
8390 == TYPE_UNSIGNED (TREE_TYPE (treeop0
))))
8392 tree op0type
= TREE_TYPE (treeop0
);
8393 enum machine_mode innermode
= TYPE_MODE (op0type
);
8394 bool zextend_p
= TYPE_UNSIGNED (op0type
);
8395 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
8396 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
8398 if (TREE_CODE (treeop0
) != INTEGER_CST
)
8400 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8401 != CODE_FOR_nothing
)
8403 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8405 temp
= expand_widening_mult (mode
, op0
, op1
, target
,
8406 unsignedp
, this_optab
);
8407 return REDUCE_BIT_FIELD (temp
);
8409 if (find_widening_optab_handler (other_optab
, mode
, innermode
, 0)
8411 && innermode
== word_mode
)
8414 op0
= expand_normal (treeop0
);
8415 if (TREE_CODE (treeop1
) == INTEGER_CST
)
8416 op1
= convert_modes (innermode
, mode
,
8417 expand_normal (treeop1
), unsignedp
);
8419 op1
= expand_normal (treeop1
);
8420 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8421 unsignedp
, OPTAB_LIB_WIDEN
);
8422 hipart
= gen_highpart (innermode
, temp
);
8423 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
8427 emit_move_insn (hipart
, htem
);
8428 return REDUCE_BIT_FIELD (temp
);
8432 treeop0
= fold_build1 (CONVERT_EXPR
, type
, treeop0
);
8433 treeop1
= fold_build1 (CONVERT_EXPR
, type
, treeop1
);
8434 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8435 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8439 optab opt
= fma_optab
;
8442 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8444 if (optab_handler (fma_optab
, mode
) == CODE_FOR_nothing
)
8446 tree fn
= mathfn_built_in (TREE_TYPE (treeop0
), BUILT_IN_FMA
);
8449 gcc_assert (fn
!= NULL_TREE
);
8450 call_expr
= build_call_expr (fn
, 3, treeop0
, treeop1
, treeop2
);
8451 return expand_builtin (call_expr
, target
, subtarget
, mode
, false);
8454 def0
= get_def_for_expr (treeop0
, NEGATE_EXPR
);
8455 def2
= get_def_for_expr (treeop2
, NEGATE_EXPR
);
8460 && optab_handler (fnms_optab
, mode
) != CODE_FOR_nothing
)
8463 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8464 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8467 && optab_handler (fnma_optab
, mode
) != CODE_FOR_nothing
)
8470 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8473 && optab_handler (fms_optab
, mode
) != CODE_FOR_nothing
)
8476 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8480 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
8482 op2
= expand_normal (treeop2
);
8483 op1
= expand_normal (treeop1
);
8485 return expand_ternary_op (TYPE_MODE (type
), opt
,
8486 op0
, op1
, op2
, target
, 0);
8490 /* If this is a fixed-point operation, then we cannot use the code
8491 below because "expand_mult" doesn't support sat/no-sat fixed-point
8493 if (ALL_FIXED_POINT_MODE_P (mode
))
8496 /* If first operand is constant, swap them.
8497 Thus the following special case checks need only
8498 check the second operand. */
8499 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8506 /* Attempt to return something suitable for generating an
8507 indexed address, for machines that support that. */
8509 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8510 && host_integerp (treeop1
, 0))
8512 tree exp1
= treeop1
;
8514 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8518 op0
= force_operand (op0
, NULL_RTX
);
8520 op0
= copy_to_mode_reg (mode
, op0
);
8522 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
8523 gen_int_mode (tree_low_cst (exp1
, 0),
8524 TYPE_MODE (TREE_TYPE (exp1
)))));
8527 if (modifier
== EXPAND_STACK_PARM
)
8530 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8531 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8533 case TRUNC_DIV_EXPR
:
8534 case FLOOR_DIV_EXPR
:
8536 case ROUND_DIV_EXPR
:
8537 case EXACT_DIV_EXPR
:
8538 /* If this is a fixed-point operation, then we cannot use the code
8539 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8541 if (ALL_FIXED_POINT_MODE_P (mode
))
8544 if (modifier
== EXPAND_STACK_PARM
)
8546 /* Possible optimization: compute the dividend with EXPAND_SUM
8547 then if the divisor is constant can optimize the case
8548 where some terms of the dividend have coeffs divisible by it. */
8549 expand_operands (treeop0
, treeop1
,
8550 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8551 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
8556 case TRUNC_MOD_EXPR
:
8557 case FLOOR_MOD_EXPR
:
8559 case ROUND_MOD_EXPR
:
8560 if (modifier
== EXPAND_STACK_PARM
)
8562 expand_operands (treeop0
, treeop1
,
8563 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8564 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
8566 case FIXED_CONVERT_EXPR
:
8567 op0
= expand_normal (treeop0
);
8568 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8569 target
= gen_reg_rtx (mode
);
8571 if ((TREE_CODE (TREE_TYPE (treeop0
)) == INTEGER_TYPE
8572 && TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8573 || (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
)))
8574 expand_fixed_convert (target
, op0
, 1, TYPE_SATURATING (type
));
8576 expand_fixed_convert (target
, op0
, 0, TYPE_SATURATING (type
));
8579 case FIX_TRUNC_EXPR
:
8580 op0
= expand_normal (treeop0
);
8581 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8582 target
= gen_reg_rtx (mode
);
8583 expand_fix (target
, op0
, unsignedp
);
8587 op0
= expand_normal (treeop0
);
8588 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8589 target
= gen_reg_rtx (mode
);
8590 /* expand_float can't figure out what to do if FROM has VOIDmode.
8591 So give it the correct mode. With -O, cse will optimize this. */
8592 if (GET_MODE (op0
) == VOIDmode
)
8593 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0
)),
8595 expand_float (target
, op0
,
8596 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8600 op0
= expand_expr (treeop0
, subtarget
,
8601 VOIDmode
, EXPAND_NORMAL
);
8602 if (modifier
== EXPAND_STACK_PARM
)
8604 temp
= expand_unop (mode
,
8605 optab_for_tree_code (NEGATE_EXPR
, type
,
8609 return REDUCE_BIT_FIELD (temp
);
8612 op0
= expand_expr (treeop0
, subtarget
,
8613 VOIDmode
, EXPAND_NORMAL
);
8614 if (modifier
== EXPAND_STACK_PARM
)
8617 /* ABS_EXPR is not valid for complex arguments. */
8618 gcc_assert (GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
8619 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
);
8621 /* Unsigned abs is simply the operand. Testing here means we don't
8622 risk generating incorrect code below. */
8623 if (TYPE_UNSIGNED (type
))
8626 return expand_abs (mode
, op0
, target
, unsignedp
,
8627 safe_from_p (target
, treeop0
, 1));
8631 target
= original_target
;
8633 || modifier
== EXPAND_STACK_PARM
8634 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
8635 || GET_MODE (target
) != mode
8637 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8638 target
= gen_reg_rtx (mode
);
8639 expand_operands (treeop0
, treeop1
,
8640 target
, &op0
, &op1
, EXPAND_NORMAL
);
8642 /* First try to do it with a special MIN or MAX instruction.
8643 If that does not win, use a conditional jump to select the proper
8645 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
8646 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8651 /* At this point, a MEM target is no longer useful; we will get better
8654 if (! REG_P (target
))
8655 target
= gen_reg_rtx (mode
);
8657 /* If op1 was placed in target, swap op0 and op1. */
8658 if (target
!= op0
&& target
== op1
)
8665 /* We generate better code and avoid problems with op1 mentioning
8666 target by forcing op1 into a pseudo if it isn't a constant. */
8667 if (! CONSTANT_P (op1
))
8668 op1
= force_reg (mode
, op1
);
8671 enum rtx_code comparison_code
;
8674 if (code
== MAX_EXPR
)
8675 comparison_code
= unsignedp
? GEU
: GE
;
8677 comparison_code
= unsignedp
? LEU
: LE
;
8679 /* Canonicalize to comparisons against 0. */
8680 if (op1
== const1_rtx
)
8682 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8683 or (a != 0 ? a : 1) for unsigned.
8684 For MIN we are safe converting (a <= 1 ? a : 1)
8685 into (a <= 0 ? a : 1) */
8686 cmpop1
= const0_rtx
;
8687 if (code
== MAX_EXPR
)
8688 comparison_code
= unsignedp
? NE
: GT
;
8690 if (op1
== constm1_rtx
&& !unsignedp
)
8692 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8693 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8694 cmpop1
= const0_rtx
;
8695 if (code
== MIN_EXPR
)
8696 comparison_code
= LT
;
8698 #ifdef HAVE_conditional_move
8699 /* Use a conditional move if possible. */
8700 if (can_conditionally_move_p (mode
))
8704 /* ??? Same problem as in expmed.c: emit_conditional_move
8705 forces a stack adjustment via compare_from_rtx, and we
8706 lose the stack adjustment if the sequence we are about
8707 to create is discarded. */
8708 do_pending_stack_adjust ();
8712 /* Try to emit the conditional move. */
8713 insn
= emit_conditional_move (target
, comparison_code
,
8718 /* If we could do the conditional move, emit the sequence,
8722 rtx seq
= get_insns ();
8728 /* Otherwise discard the sequence and fall back to code with
8734 emit_move_insn (target
, op0
);
8736 temp
= gen_label_rtx ();
8737 do_compare_rtx_and_jump (target
, cmpop1
, comparison_code
,
8738 unsignedp
, mode
, NULL_RTX
, NULL_RTX
, temp
,
8741 emit_move_insn (target
, op1
);
8746 op0
= expand_expr (treeop0
, subtarget
,
8747 VOIDmode
, EXPAND_NORMAL
);
8748 if (modifier
== EXPAND_STACK_PARM
)
8750 /* In case we have to reduce the result to bitfield precision
8751 for unsigned bitfield expand this as XOR with a proper constant
8753 if (reduce_bit_field
&& TYPE_UNSIGNED (type
))
8754 temp
= expand_binop (mode
, xor_optab
, op0
,
8755 immed_double_int_const
8756 (double_int_mask (TYPE_PRECISION (type
)), mode
),
8757 target
, 1, OPTAB_LIB_WIDEN
);
8759 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8763 /* ??? Can optimize bitwise operations with one arg constant.
8764 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8765 and (a bitwise1 b) bitwise2 b (etc)
8766 but that is probably not worth while. */
8775 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type
))
8776 || (GET_MODE_PRECISION (TYPE_MODE (type
))
8777 == TYPE_PRECISION (type
)));
8782 /* If this is a fixed-point operation, then we cannot use the code
8783 below because "expand_shift" doesn't support sat/no-sat fixed-point
8785 if (ALL_FIXED_POINT_MODE_P (mode
))
8788 if (! safe_from_p (subtarget
, treeop1
, 1))
8790 if (modifier
== EXPAND_STACK_PARM
)
8792 op0
= expand_expr (treeop0
, subtarget
,
8793 VOIDmode
, EXPAND_NORMAL
);
8794 temp
= expand_variable_shift (code
, mode
, op0
, treeop1
, target
,
8796 if (code
== LSHIFT_EXPR
)
8797 temp
= REDUCE_BIT_FIELD (temp
);
8800 /* Could determine the answer when only additive constants differ. Also,
8801 the addition of one can be handled by changing the condition. */
8808 case UNORDERED_EXPR
:
8816 temp
= do_store_flag (ops
,
8817 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8818 tmode
!= VOIDmode
? tmode
: mode
);
8822 /* Use a compare and a jump for BLKmode comparisons, or for function
8823 type comparisons is HAVE_canonicalize_funcptr_for_compare. */
8826 || modifier
== EXPAND_STACK_PARM
8827 || ! safe_from_p (target
, treeop0
, 1)
8828 || ! safe_from_p (target
, treeop1
, 1)
8829 /* Make sure we don't have a hard reg (such as function's return
8830 value) live across basic blocks, if not optimizing. */
8831 || (!optimize
&& REG_P (target
)
8832 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8833 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8835 emit_move_insn (target
, const0_rtx
);
8837 op1
= gen_label_rtx ();
8838 jumpifnot_1 (code
, treeop0
, treeop1
, op1
, -1);
8840 if (TYPE_PRECISION (type
) == 1 && !TYPE_UNSIGNED (type
))
8841 emit_move_insn (target
, constm1_rtx
);
8843 emit_move_insn (target
, const1_rtx
);
8849 /* Get the rtx code of the operands. */
8850 op0
= expand_normal (treeop0
);
8851 op1
= expand_normal (treeop1
);
8854 target
= gen_reg_rtx (TYPE_MODE (type
));
8856 /* Move the real (op0) and imaginary (op1) parts to their location. */
8857 write_complex_part (target
, op0
, false);
8858 write_complex_part (target
, op1
, true);
8862 case WIDEN_SUM_EXPR
:
8864 tree oprnd0
= treeop0
;
8865 tree oprnd1
= treeop1
;
8867 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8868 target
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, op1
,
8873 case REDUC_MAX_EXPR
:
8874 case REDUC_MIN_EXPR
:
8875 case REDUC_PLUS_EXPR
:
8877 op0
= expand_normal (treeop0
);
8878 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
8879 temp
= expand_unop (mode
, this_optab
, op0
, target
, unsignedp
);
8884 case VEC_LSHIFT_EXPR
:
8885 case VEC_RSHIFT_EXPR
:
8887 target
= expand_vec_shift_expr (ops
, target
);
8891 case VEC_UNPACK_HI_EXPR
:
8892 case VEC_UNPACK_LO_EXPR
:
8894 op0
= expand_normal (treeop0
);
8895 temp
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, NULL_RTX
,
8901 case VEC_UNPACK_FLOAT_HI_EXPR
:
8902 case VEC_UNPACK_FLOAT_LO_EXPR
:
8904 op0
= expand_normal (treeop0
);
8905 /* The signedness is determined from input operand. */
8906 temp
= expand_widen_pattern_expr
8907 (ops
, op0
, NULL_RTX
, NULL_RTX
,
8908 target
, TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8914 case VEC_WIDEN_MULT_HI_EXPR
:
8915 case VEC_WIDEN_MULT_LO_EXPR
:
8917 tree oprnd0
= treeop0
;
8918 tree oprnd1
= treeop1
;
8920 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8921 target
= expand_widen_pattern_expr (ops
, op0
, op1
, NULL_RTX
,
8923 gcc_assert (target
);
8927 case VEC_WIDEN_LSHIFT_HI_EXPR
:
8928 case VEC_WIDEN_LSHIFT_LO_EXPR
:
8930 tree oprnd0
= treeop0
;
8931 tree oprnd1
= treeop1
;
8933 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8934 target
= expand_widen_pattern_expr (ops
, op0
, op1
, NULL_RTX
,
8936 gcc_assert (target
);
8940 case VEC_PACK_TRUNC_EXPR
:
8941 case VEC_PACK_SAT_EXPR
:
8942 case VEC_PACK_FIX_TRUNC_EXPR
:
8943 mode
= TYPE_MODE (TREE_TYPE (treeop0
));
8947 expand_operands (treeop0
, treeop1
, target
, &op0
, &op1
, EXPAND_NORMAL
);
8948 op2
= expand_normal (treeop2
);
8950 /* Careful here: if the target doesn't support integral vector modes,
8951 a constant selection vector could wind up smooshed into a normal
8952 integral constant. */
8953 if (CONSTANT_P (op2
) && GET_CODE (op2
) != CONST_VECTOR
)
8955 tree sel_type
= TREE_TYPE (treeop2
);
8956 enum machine_mode vmode
8957 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type
)),
8958 TYPE_VECTOR_SUBPARTS (sel_type
));
8959 gcc_assert (GET_MODE_CLASS (vmode
) == MODE_VECTOR_INT
);
8960 op2
= simplify_subreg (vmode
, op2
, TYPE_MODE (sel_type
), 0);
8961 gcc_assert (op2
&& GET_CODE (op2
) == CONST_VECTOR
);
8964 gcc_assert (GET_MODE_CLASS (GET_MODE (op2
)) == MODE_VECTOR_INT
);
8966 temp
= expand_vec_perm (mode
, op0
, op1
, op2
, target
);
8972 tree oprnd0
= treeop0
;
8973 tree oprnd1
= treeop1
;
8974 tree oprnd2
= treeop2
;
8977 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8978 op2
= expand_normal (oprnd2
);
8979 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
8984 case REALIGN_LOAD_EXPR
:
8986 tree oprnd0
= treeop0
;
8987 tree oprnd1
= treeop1
;
8988 tree oprnd2
= treeop2
;
8991 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
8992 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8993 op2
= expand_normal (oprnd2
);
8994 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
9001 /* A COND_EXPR with its type being VOID_TYPE represents a
9002 conditional jump and is handled in
9003 expand_gimple_cond_expr. */
9004 gcc_assert (!VOID_TYPE_P (type
));
9006 /* Note that COND_EXPRs whose type is a structure or union
9007 are required to be constructed to contain assignments of
9008 a temporary variable, so that we can evaluate them here
9009 for side effect only. If type is void, we must do likewise. */
9011 gcc_assert (!TREE_ADDRESSABLE (type
)
9013 && TREE_TYPE (treeop1
) != void_type_node
9014 && TREE_TYPE (treeop2
) != void_type_node
);
9016 temp
= expand_cond_expr_using_cmove (treeop0
, treeop1
, treeop2
);
9020 /* If we are not to produce a result, we have no target. Otherwise,
9021 if a target was specified use it; it will not be used as an
9022 intermediate target unless it is safe. If no target, use a
9025 if (modifier
!= EXPAND_STACK_PARM
9027 && safe_from_p (original_target
, treeop0
, 1)
9028 && GET_MODE (original_target
) == mode
9029 && !MEM_P (original_target
))
9030 temp
= original_target
;
9032 temp
= assign_temp (type
, 0, 1);
9034 do_pending_stack_adjust ();
9036 op0
= gen_label_rtx ();
9037 op1
= gen_label_rtx ();
9038 jumpifnot (treeop0
, op0
, -1);
9039 store_expr (treeop1
, temp
,
9040 modifier
== EXPAND_STACK_PARM
,
9043 emit_jump_insn (gen_jump (op1
));
9046 store_expr (treeop2
, temp
,
9047 modifier
== EXPAND_STACK_PARM
,
9055 target
= expand_vec_cond_expr (type
, treeop0
, treeop1
, treeop2
, target
);
9062 /* Here to do an ordinary binary operator. */
9064 expand_operands (treeop0
, treeop1
,
9065 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9067 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9069 if (modifier
== EXPAND_STACK_PARM
)
9071 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9072 unsignedp
, OPTAB_LIB_WIDEN
);
9074 /* Bitwise operations do not need bitfield reduction as we expect their
9075 operands being properly truncated. */
9076 if (code
== BIT_XOR_EXPR
9077 || code
== BIT_AND_EXPR
9078 || code
== BIT_IOR_EXPR
)
9080 return REDUCE_BIT_FIELD (temp
);
9082 #undef REDUCE_BIT_FIELD
9085 expand_expr_real_1 (tree exp
, rtx target
, enum machine_mode tmode
,
9086 enum expand_modifier modifier
, rtx
*alt_rtl
)
9088 rtx op0
, op1
, temp
, decl_rtl
;
9091 enum machine_mode mode
;
9092 enum tree_code code
= TREE_CODE (exp
);
9093 rtx subtarget
, original_target
;
9096 bool reduce_bit_field
;
9097 location_t loc
= EXPR_LOCATION (exp
);
9098 struct separate_ops ops
;
9099 tree treeop0
, treeop1
, treeop2
;
9100 tree ssa_name
= NULL_TREE
;
9103 type
= TREE_TYPE (exp
);
9104 mode
= TYPE_MODE (type
);
9105 unsignedp
= TYPE_UNSIGNED (type
);
9107 treeop0
= treeop1
= treeop2
= NULL_TREE
;
9108 if (!VL_EXP_CLASS_P (exp
))
9109 switch (TREE_CODE_LENGTH (code
))
9112 case 3: treeop2
= TREE_OPERAND (exp
, 2);
9113 case 2: treeop1
= TREE_OPERAND (exp
, 1);
9114 case 1: treeop0
= TREE_OPERAND (exp
, 0);
9124 ignore
= (target
== const0_rtx
9125 || ((CONVERT_EXPR_CODE_P (code
)
9126 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
9127 && TREE_CODE (type
) == VOID_TYPE
));
9129 /* An operation in what may be a bit-field type needs the
9130 result to be reduced to the precision of the bit-field type,
9131 which is narrower than that of the type's mode. */
9132 reduce_bit_field
= (!ignore
9133 && INTEGRAL_TYPE_P (type
)
9134 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
9136 /* If we are going to ignore this result, we need only do something
9137 if there is a side-effect somewhere in the expression. If there
9138 is, short-circuit the most common cases here. Note that we must
9139 not call expand_expr with anything but const0_rtx in case this
9140 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9144 if (! TREE_SIDE_EFFECTS (exp
))
9147 /* Ensure we reference a volatile object even if value is ignored, but
9148 don't do this if all we are doing is taking its address. */
9149 if (TREE_THIS_VOLATILE (exp
)
9150 && TREE_CODE (exp
) != FUNCTION_DECL
9151 && mode
!= VOIDmode
&& mode
!= BLKmode
9152 && modifier
!= EXPAND_CONST_ADDRESS
)
9154 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
9160 if (TREE_CODE_CLASS (code
) == tcc_unary
9161 || code
== COMPONENT_REF
|| code
== INDIRECT_REF
)
9162 return expand_expr (treeop0
, const0_rtx
, VOIDmode
,
9165 else if (TREE_CODE_CLASS (code
) == tcc_binary
9166 || TREE_CODE_CLASS (code
) == tcc_comparison
9167 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
9169 expand_expr (treeop0
, const0_rtx
, VOIDmode
, modifier
);
9170 expand_expr (treeop1
, const0_rtx
, VOIDmode
, modifier
);
9173 else if (code
== BIT_FIELD_REF
)
9175 expand_expr (treeop0
, const0_rtx
, VOIDmode
, modifier
);
9176 expand_expr (treeop1
, const0_rtx
, VOIDmode
, modifier
);
9177 expand_expr (treeop2
, const0_rtx
, VOIDmode
, modifier
);
9184 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
9187 /* Use subtarget as the target for operand 0 of a binary operation. */
9188 subtarget
= get_subtarget (target
);
9189 original_target
= target
;
9195 tree function
= decl_function_context (exp
);
9197 temp
= label_rtx (exp
);
9198 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
9200 if (function
!= current_function_decl
9202 LABEL_REF_NONLOCAL_P (temp
) = 1;
9204 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
9209 /* ??? ivopts calls expander, without any preparation from
9210 out-of-ssa. So fake instructions as if this was an access to the
9211 base variable. This unnecessarily allocates a pseudo, see how we can
9212 reuse it, if partition base vars have it set already. */
9213 if (!currently_expanding_to_rtl
)
9214 return expand_expr_real_1 (SSA_NAME_VAR (exp
), target
, tmode
, modifier
,
9217 g
= get_gimple_for_ssa_name (exp
);
9218 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9220 && modifier
== EXPAND_INITIALIZER
9221 && !SSA_NAME_IS_DEFAULT_DEF (exp
)
9222 && (optimize
|| DECL_IGNORED_P (SSA_NAME_VAR (exp
)))
9223 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp
)))
9224 g
= SSA_NAME_DEF_STMT (exp
);
9227 rtx r
= expand_expr_real (gimple_assign_rhs_to_tree (g
), target
,
9228 tmode
, modifier
, NULL
);
9229 if (REG_P (r
) && !REG_EXPR (r
))
9230 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp
), r
);
9235 decl_rtl
= get_rtx_for_ssa_name (ssa_name
);
9236 exp
= SSA_NAME_VAR (ssa_name
);
9237 goto expand_decl_rtl
;
9241 /* If a static var's type was incomplete when the decl was written,
9242 but the type is complete now, lay out the decl now. */
9243 if (DECL_SIZE (exp
) == 0
9244 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
9245 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
9246 layout_decl (exp
, 0);
9248 /* ... fall through ... */
9252 decl_rtl
= DECL_RTL (exp
);
9254 gcc_assert (decl_rtl
);
9255 decl_rtl
= copy_rtx (decl_rtl
);
9256 /* Record writes to register variables. */
9257 if (modifier
== EXPAND_WRITE
9259 && HARD_REGISTER_P (decl_rtl
))
9260 add_to_hard_reg_set (&crtl
->asm_clobbers
,
9261 GET_MODE (decl_rtl
), REGNO (decl_rtl
));
9263 /* Ensure variable marked as used even if it doesn't go through
9264 a parser. If it hasn't be used yet, write out an external
9266 TREE_USED (exp
) = 1;
9268 /* Show we haven't gotten RTL for this yet. */
9271 /* Variables inherited from containing functions should have
9272 been lowered by this point. */
9273 context
= decl_function_context (exp
);
9274 gcc_assert (!context
9275 || context
== current_function_decl
9276 || TREE_STATIC (exp
)
9277 || DECL_EXTERNAL (exp
)
9278 /* ??? C++ creates functions that are not TREE_STATIC. */
9279 || TREE_CODE (exp
) == FUNCTION_DECL
);
9281 /* This is the case of an array whose size is to be determined
9282 from its initializer, while the initializer is still being parsed.
9283 ??? We aren't parsing while expanding anymore. */
9285 if (MEM_P (decl_rtl
) && REG_P (XEXP (decl_rtl
, 0)))
9286 temp
= validize_mem (decl_rtl
);
9288 /* If DECL_RTL is memory, we are in the normal case and the
9289 address is not valid, get the address into a register. */
9291 else if (MEM_P (decl_rtl
) && modifier
!= EXPAND_INITIALIZER
)
9294 *alt_rtl
= decl_rtl
;
9295 decl_rtl
= use_anchored_address (decl_rtl
);
9296 if (modifier
!= EXPAND_CONST_ADDRESS
9297 && modifier
!= EXPAND_SUM
9298 && !memory_address_addr_space_p (DECL_MODE (exp
),
9300 MEM_ADDR_SPACE (decl_rtl
)))
9301 temp
= replace_equiv_address (decl_rtl
,
9302 copy_rtx (XEXP (decl_rtl
, 0)));
9305 /* If we got something, return it. But first, set the alignment
9306 if the address is a register. */
9309 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
9310 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
9315 /* If the mode of DECL_RTL does not match that of the decl,
9316 there are two cases: we are dealing with a BLKmode value
9317 that is returned in a register, or we are dealing with
9318 a promoted value. In the latter case, return a SUBREG
9319 of the wanted mode, but mark it so that we know that it
9320 was already extended. */
9321 if (REG_P (decl_rtl
)
9322 && DECL_MODE (exp
) != BLKmode
9323 && GET_MODE (decl_rtl
) != DECL_MODE (exp
))
9325 enum machine_mode pmode
;
9327 /* Get the signedness to be used for this variable. Ensure we get
9328 the same mode we got when the variable was declared. */
9329 if (code
== SSA_NAME
9330 && (g
= SSA_NAME_DEF_STMT (ssa_name
))
9331 && gimple_code (g
) == GIMPLE_CALL
)
9333 gcc_assert (!gimple_call_internal_p (g
));
9334 pmode
= promote_function_mode (type
, mode
, &unsignedp
,
9335 gimple_call_fntype (g
),
9339 pmode
= promote_decl_mode (exp
, &unsignedp
);
9340 gcc_assert (GET_MODE (decl_rtl
) == pmode
);
9342 temp
= gen_lowpart_SUBREG (mode
, decl_rtl
);
9343 SUBREG_PROMOTED_VAR_P (temp
) = 1;
9344 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
9351 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
9352 TREE_INT_CST_HIGH (exp
), mode
);
9358 tree tmp
= NULL_TREE
;
9359 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
9360 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
9361 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
9362 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
9363 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
9364 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
9365 return const_vector_from_tree (exp
);
9366 if (GET_MODE_CLASS (mode
) == MODE_INT
)
9368 tree type_for_mode
= lang_hooks
.types
.type_for_mode (mode
, 1);
9370 tmp
= fold_unary_loc (loc
, VIEW_CONVERT_EXPR
, type_for_mode
, exp
);
9374 VEC(constructor_elt
,gc
) *v
;
9376 v
= VEC_alloc (constructor_elt
, gc
, VECTOR_CST_NELTS (exp
));
9377 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
9378 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, VECTOR_CST_ELT (exp
, i
));
9379 tmp
= build_constructor (type
, v
);
9381 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
9386 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
9389 /* If optimized, generate immediate CONST_DOUBLE
9390 which will be turned into memory by reload if necessary.
9392 We used to force a register so that loop.c could see it. But
9393 this does not allow gen_* patterns to perform optimizations with
9394 the constants. It also produces two insns in cases like "x = 1.0;".
9395 On most machines, floating-point constants are not permitted in
9396 many insns, so we'd end up copying it to a register in any case.
9398 Now, we do the copying in expand_binop, if appropriate. */
9399 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
9400 TYPE_MODE (TREE_TYPE (exp
)));
9403 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
9404 TYPE_MODE (TREE_TYPE (exp
)));
9407 /* Handle evaluating a complex constant in a CONCAT target. */
9408 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
9410 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9413 rtarg
= XEXP (original_target
, 0);
9414 itarg
= XEXP (original_target
, 1);
9416 /* Move the real and imaginary parts separately. */
9417 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
9418 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
9421 emit_move_insn (rtarg
, op0
);
9423 emit_move_insn (itarg
, op1
);
9425 return original_target
;
9428 /* ... fall through ... */
9431 temp
= expand_expr_constant (exp
, 1, modifier
);
9433 /* temp contains a constant address.
9434 On RISC machines where a constant address isn't valid,
9435 make some insns to get that address into a register. */
9436 if (modifier
!= EXPAND_CONST_ADDRESS
9437 && modifier
!= EXPAND_INITIALIZER
9438 && modifier
!= EXPAND_SUM
9439 && ! memory_address_addr_space_p (mode
, XEXP (temp
, 0),
9440 MEM_ADDR_SPACE (temp
)))
9441 return replace_equiv_address (temp
,
9442 copy_rtx (XEXP (temp
, 0)));
9448 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
);
9450 if (!SAVE_EXPR_RESOLVED_P (exp
))
9452 /* We can indeed still hit this case, typically via builtin
9453 expanders calling save_expr immediately before expanding
9454 something. Assume this means that we only have to deal
9455 with non-BLKmode values. */
9456 gcc_assert (GET_MODE (ret
) != BLKmode
);
9458 val
= build_decl (EXPR_LOCATION (exp
),
9459 VAR_DECL
, NULL
, TREE_TYPE (exp
));
9460 DECL_ARTIFICIAL (val
) = 1;
9461 DECL_IGNORED_P (val
) = 1;
9463 TREE_OPERAND (exp
, 0) = treeop0
;
9464 SAVE_EXPR_RESOLVED_P (exp
) = 1;
9466 if (!CONSTANT_P (ret
))
9467 ret
= copy_to_reg (ret
);
9468 SET_DECL_RTL (val
, ret
);
9476 /* If we don't need the result, just ensure we evaluate any
9480 unsigned HOST_WIDE_INT idx
;
9483 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
9484 expand_expr (value
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
9489 return expand_constructor (exp
, target
, modifier
, false);
9491 case TARGET_MEM_REF
:
9494 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
9495 struct mem_address addr
;
9496 enum insn_code icode
;
9499 get_address_description (exp
, &addr
);
9500 op0
= addr_for_mem_ref (&addr
, as
, true);
9501 op0
= memory_address_addr_space (mode
, op0
, as
);
9502 temp
= gen_rtx_MEM (mode
, op0
);
9503 set_mem_attributes (temp
, exp
, 0);
9504 set_mem_addr_space (temp
, as
);
9505 align
= get_object_or_type_alignment (exp
);
9506 if (modifier
!= EXPAND_WRITE
9508 && align
< GET_MODE_ALIGNMENT (mode
)
9509 /* If the target does not have special handling for unaligned
9510 loads of mode then it can use regular moves for them. */
9511 && ((icode
= optab_handler (movmisalign_optab
, mode
))
9512 != CODE_FOR_nothing
))
9514 struct expand_operand ops
[2];
9516 /* We've already validated the memory, and we're creating a
9517 new pseudo destination. The predicates really can't fail,
9518 nor can the generator. */
9519 create_output_operand (&ops
[0], NULL_RTX
, mode
);
9520 create_fixed_operand (&ops
[1], temp
);
9521 expand_insn (icode
, 2, ops
);
9522 return ops
[0].value
;
9530 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
9531 enum machine_mode address_mode
;
9532 tree base
= TREE_OPERAND (exp
, 0);
9534 enum insn_code icode
;
9536 /* Handle expansion of non-aliased memory with non-BLKmode. That
9537 might end up in a register. */
9538 if (mem_ref_refers_to_non_mem_p (exp
))
9540 HOST_WIDE_INT offset
= mem_ref_offset (exp
).low
;
9543 base
= TREE_OPERAND (base
, 0);
9545 && host_integerp (TYPE_SIZE (TREE_TYPE (exp
)), 1)
9546 && (GET_MODE_BITSIZE (DECL_MODE (base
))
9547 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp
)))))
9548 return expand_expr (build1 (VIEW_CONVERT_EXPR
,
9549 TREE_TYPE (exp
), base
),
9550 target
, tmode
, modifier
);
9551 bit_offset
= bitsize_int (offset
* BITS_PER_UNIT
);
9552 bftype
= TREE_TYPE (base
);
9553 if (TYPE_MODE (TREE_TYPE (exp
)) != BLKmode
)
9554 bftype
= TREE_TYPE (exp
);
9557 temp
= assign_stack_temp (DECL_MODE (base
),
9558 GET_MODE_SIZE (DECL_MODE (base
)));
9559 store_expr (base
, temp
, 0, false);
9560 temp
= adjust_address (temp
, BLKmode
, offset
);
9561 set_mem_size (temp
, int_size_in_bytes (TREE_TYPE (exp
)));
9564 return expand_expr (build3 (BIT_FIELD_REF
, bftype
,
9566 TYPE_SIZE (TREE_TYPE (exp
)),
9568 target
, tmode
, modifier
);
9570 address_mode
= targetm
.addr_space
.address_mode (as
);
9571 base
= TREE_OPERAND (exp
, 0);
9572 if ((def_stmt
= get_def_for_expr (base
, BIT_AND_EXPR
)))
9574 tree mask
= gimple_assign_rhs2 (def_stmt
);
9575 base
= build2 (BIT_AND_EXPR
, TREE_TYPE (base
),
9576 gimple_assign_rhs1 (def_stmt
), mask
);
9577 TREE_OPERAND (exp
, 0) = base
;
9579 align
= get_object_or_type_alignment (exp
);
9580 op0
= expand_expr (base
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
9581 op0
= memory_address_addr_space (address_mode
, op0
, as
);
9582 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
9585 = immed_double_int_const (mem_ref_offset (exp
), address_mode
);
9586 op0
= simplify_gen_binary (PLUS
, address_mode
, op0
, off
);
9588 op0
= memory_address_addr_space (mode
, op0
, as
);
9589 temp
= gen_rtx_MEM (mode
, op0
);
9590 set_mem_attributes (temp
, exp
, 0);
9591 set_mem_addr_space (temp
, as
);
9592 if (TREE_THIS_VOLATILE (exp
))
9593 MEM_VOLATILE_P (temp
) = 1;
9594 if (modifier
!= EXPAND_WRITE
9596 && align
< GET_MODE_ALIGNMENT (mode
))
9598 if ((icode
= optab_handler (movmisalign_optab
, mode
))
9599 != CODE_FOR_nothing
)
9601 struct expand_operand ops
[2];
9603 /* We've already validated the memory, and we're creating a
9604 new pseudo destination. The predicates really can't fail,
9605 nor can the generator. */
9606 create_output_operand (&ops
[0], NULL_RTX
, mode
);
9607 create_fixed_operand (&ops
[1], temp
);
9608 expand_insn (icode
, 2, ops
);
9609 return ops
[0].value
;
9611 else if (SLOW_UNALIGNED_ACCESS (mode
, align
))
9612 temp
= extract_bit_field (temp
, GET_MODE_BITSIZE (mode
),
9613 0, TYPE_UNSIGNED (TREE_TYPE (exp
)),
9614 true, (modifier
== EXPAND_STACK_PARM
9615 ? NULL_RTX
: target
),
9624 tree array
= treeop0
;
9625 tree index
= treeop1
;
9627 /* Fold an expression like: "foo"[2].
9628 This is not done in fold so it won't happen inside &.
9629 Don't fold if this is for wide characters since it's too
9630 difficult to do correctly and this is a very rare case. */
9632 if (modifier
!= EXPAND_CONST_ADDRESS
9633 && modifier
!= EXPAND_INITIALIZER
9634 && modifier
!= EXPAND_MEMORY
)
9636 tree t
= fold_read_from_constant_string (exp
);
9639 return expand_expr (t
, target
, tmode
, modifier
);
9642 /* If this is a constant index into a constant array,
9643 just get the value from the array. Handle both the cases when
9644 we have an explicit constructor and when our operand is a variable
9645 that was declared const. */
9647 if (modifier
!= EXPAND_CONST_ADDRESS
9648 && modifier
!= EXPAND_INITIALIZER
9649 && modifier
!= EXPAND_MEMORY
9650 && TREE_CODE (array
) == CONSTRUCTOR
9651 && ! TREE_SIDE_EFFECTS (array
)
9652 && TREE_CODE (index
) == INTEGER_CST
)
9654 unsigned HOST_WIDE_INT ix
;
9657 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
9659 if (tree_int_cst_equal (field
, index
))
9661 if (!TREE_SIDE_EFFECTS (value
))
9662 return expand_expr (fold (value
), target
, tmode
, modifier
);
9667 else if (optimize
>= 1
9668 && modifier
!= EXPAND_CONST_ADDRESS
9669 && modifier
!= EXPAND_INITIALIZER
9670 && modifier
!= EXPAND_MEMORY
9671 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
9672 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
9673 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
9674 && const_value_known_p (array
))
9676 if (TREE_CODE (index
) == INTEGER_CST
)
9678 tree init
= DECL_INITIAL (array
);
9680 if (TREE_CODE (init
) == CONSTRUCTOR
)
9682 unsigned HOST_WIDE_INT ix
;
9685 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
9687 if (tree_int_cst_equal (field
, index
))
9689 if (TREE_SIDE_EFFECTS (value
))
9692 if (TREE_CODE (value
) == CONSTRUCTOR
)
9694 /* If VALUE is a CONSTRUCTOR, this
9695 optimization is only useful if
9696 this doesn't store the CONSTRUCTOR
9697 into memory. If it does, it is more
9698 efficient to just load the data from
9699 the array directly. */
9700 rtx ret
= expand_constructor (value
, target
,
9702 if (ret
== NULL_RTX
)
9706 return expand_expr (fold (value
), target
, tmode
,
9710 else if(TREE_CODE (init
) == STRING_CST
)
9712 tree index1
= index
;
9713 tree low_bound
= array_ref_low_bound (exp
);
9714 index1
= fold_convert_loc (loc
, sizetype
,
9717 /* Optimize the special-case of a zero lower bound.
9719 We convert the low_bound to sizetype to avoid some problems
9720 with constant folding. (E.g. suppose the lower bound is 1,
9721 and its mode is QI. Without the conversion,l (ARRAY
9722 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9723 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
9725 if (! integer_zerop (low_bound
))
9726 index1
= size_diffop_loc (loc
, index1
,
9727 fold_convert_loc (loc
, sizetype
,
9730 if (0 > compare_tree_int (index1
,
9731 TREE_STRING_LENGTH (init
)))
9733 tree type
= TREE_TYPE (TREE_TYPE (init
));
9734 enum machine_mode mode
= TYPE_MODE (type
);
9736 if (GET_MODE_CLASS (mode
) == MODE_INT
9737 && GET_MODE_SIZE (mode
) == 1)
9738 return gen_int_mode (TREE_STRING_POINTER (init
)
9739 [TREE_INT_CST_LOW (index1
)],
9746 goto normal_inner_ref
;
9749 /* If the operand is a CONSTRUCTOR, we can just extract the
9750 appropriate field if it is present. */
9751 if (TREE_CODE (treeop0
) == CONSTRUCTOR
)
9753 unsigned HOST_WIDE_INT idx
;
9756 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0
),
9758 if (field
== treeop1
9759 /* We can normally use the value of the field in the
9760 CONSTRUCTOR. However, if this is a bitfield in
9761 an integral mode that we can fit in a HOST_WIDE_INT,
9762 we must mask only the number of bits in the bitfield,
9763 since this is done implicitly by the constructor. If
9764 the bitfield does not meet either of those conditions,
9765 we can't do this optimization. */
9766 && (! DECL_BIT_FIELD (field
)
9767 || ((GET_MODE_CLASS (DECL_MODE (field
)) == MODE_INT
)
9768 && (GET_MODE_PRECISION (DECL_MODE (field
))
9769 <= HOST_BITS_PER_WIDE_INT
))))
9771 if (DECL_BIT_FIELD (field
)
9772 && modifier
== EXPAND_STACK_PARM
)
9774 op0
= expand_expr (value
, target
, tmode
, modifier
);
9775 if (DECL_BIT_FIELD (field
))
9777 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
9778 enum machine_mode imode
= TYPE_MODE (TREE_TYPE (field
));
9780 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
9782 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
9783 op0
= expand_and (imode
, op0
, op1
, target
);
9787 int count
= GET_MODE_PRECISION (imode
) - bitsize
;
9789 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
9791 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
9799 goto normal_inner_ref
;
9802 case ARRAY_RANGE_REF
:
9805 enum machine_mode mode1
, mode2
;
9806 HOST_WIDE_INT bitsize
, bitpos
;
9808 int volatilep
= 0, must_force_mem
;
9809 bool packedp
= false;
9810 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
9811 &mode1
, &unsignedp
, &volatilep
, true);
9812 rtx orig_op0
, memloc
;
9813 bool mem_attrs_from_type
= false;
9815 /* If we got back the original object, something is wrong. Perhaps
9816 we are evaluating an expression too early. In any event, don't
9817 infinitely recurse. */
9818 gcc_assert (tem
!= exp
);
9820 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
9821 || (TREE_CODE (TREE_OPERAND (exp
, 1)) == FIELD_DECL
9822 && DECL_PACKED (TREE_OPERAND (exp
, 1))))
9825 /* If TEM's type is a union of variable size, pass TARGET to the inner
9826 computation, since it will need a temporary and TARGET is known
9827 to have to do. This occurs in unchecked conversion in Ada. */
9830 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
9831 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
9833 && modifier
!= EXPAND_STACK_PARM
9834 ? target
: NULL_RTX
),
9836 (modifier
== EXPAND_INITIALIZER
9837 || modifier
== EXPAND_CONST_ADDRESS
9838 || modifier
== EXPAND_STACK_PARM
)
9839 ? modifier
: EXPAND_NORMAL
);
9842 /* If the bitfield is volatile, we want to access it in the
9843 field's mode, not the computed mode.
9844 If a MEM has VOIDmode (external with incomplete type),
9845 use BLKmode for it instead. */
9848 if (volatilep
&& flag_strict_volatile_bitfields
> 0)
9849 op0
= adjust_address (op0
, mode1
, 0);
9850 else if (GET_MODE (op0
) == VOIDmode
)
9851 op0
= adjust_address (op0
, BLKmode
, 0);
9855 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
9857 /* If we have either an offset, a BLKmode result, or a reference
9858 outside the underlying object, we must force it to memory.
9859 Such a case can occur in Ada if we have unchecked conversion
9860 of an expression from a scalar type to an aggregate type or
9861 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9862 passed a partially uninitialized object or a view-conversion
9863 to a larger size. */
9864 must_force_mem
= (offset
9866 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));
9868 /* Handle CONCAT first. */
9869 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
9872 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)))
9875 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
9878 op0
= XEXP (op0
, 0);
9879 mode2
= GET_MODE (op0
);
9881 else if (bitpos
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
9882 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1)))
9886 op0
= XEXP (op0
, 1);
9888 mode2
= GET_MODE (op0
);
9891 /* Otherwise force into memory. */
9895 /* If this is a constant, put it in a register if it is a legitimate
9896 constant and we don't need a memory reference. */
9897 if (CONSTANT_P (op0
)
9899 && targetm
.legitimate_constant_p (mode2
, op0
)
9901 op0
= force_reg (mode2
, op0
);
9903 /* Otherwise, if this is a constant, try to force it to the constant
9904 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9905 is a legitimate constant. */
9906 else if (CONSTANT_P (op0
) && (memloc
= force_const_mem (mode2
, op0
)))
9907 op0
= validize_mem (memloc
);
9909 /* Otherwise, if this is a constant or the object is not in memory
9910 and need be, put it there. */
9911 else if (CONSTANT_P (op0
) || (!MEM_P (op0
) && must_force_mem
))
9913 tree nt
= build_qualified_type (TREE_TYPE (tem
),
9914 (TYPE_QUALS (TREE_TYPE (tem
))
9915 | TYPE_QUAL_CONST
));
9916 memloc
= assign_temp (nt
, 1, 1);
9917 emit_move_insn (memloc
, op0
);
9919 mem_attrs_from_type
= true;
9924 enum machine_mode address_mode
;
9925 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
9928 gcc_assert (MEM_P (op0
));
9930 address_mode
= get_address_mode (op0
);
9931 if (GET_MODE (offset_rtx
) != address_mode
)
9932 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
9934 if (GET_MODE (op0
) == BLKmode
9935 /* A constant address in OP0 can have VOIDmode, we must
9936 not try to call force_reg in that case. */
9937 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
9939 && (bitpos
% bitsize
) == 0
9940 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
9941 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
9943 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
9947 op0
= offset_address (op0
, offset_rtx
,
9948 highest_pow2_factor (offset
));
9951 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9952 record its alignment as BIGGEST_ALIGNMENT. */
9953 if (MEM_P (op0
) && bitpos
== 0 && offset
!= 0
9954 && is_aligning_offset (offset
, tem
))
9955 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
9957 /* Don't forget about volatility even if this is a bitfield. */
9958 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
9960 if (op0
== orig_op0
)
9961 op0
= copy_rtx (op0
);
9963 MEM_VOLATILE_P (op0
) = 1;
9966 /* In cases where an aligned union has an unaligned object
9967 as a field, we might be extracting a BLKmode value from
9968 an integer-mode (e.g., SImode) object. Handle this case
9969 by doing the extract into an object as wide as the field
9970 (which we know to be the width of a basic mode), then
9971 storing into memory, and changing the mode to BLKmode. */
9972 if (mode1
== VOIDmode
9973 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
9974 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
9975 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
9976 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
9977 && modifier
!= EXPAND_CONST_ADDRESS
9978 && modifier
!= EXPAND_INITIALIZER
)
9979 /* If the field is volatile, we always want an aligned
9980 access. Do this in following two situations:
9981 1. the access is not already naturally
9982 aligned, otherwise "normal" (non-bitfield) volatile fields
9983 become non-addressable.
9984 2. the bitsize is narrower than the access size. Need
9985 to extract bitfields from the access. */
9986 || (volatilep
&& flag_strict_volatile_bitfields
> 0
9987 && (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0
9988 || (mode1
!= BLKmode
9989 && bitsize
< GET_MODE_SIZE (mode1
		      ) * BITS_PER_UNIT)))
	  /* If the field isn't aligned enough to fetch as a memref,
	     fetch it as a bit field.  */
	  || (mode1 != BLKmode
	      && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		    || (MEM_P (op0)
			&& (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			    || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		   && ((modifier == EXPAND_CONST_ADDRESS
			|| modifier == EXPAND_INITIALIZER)
		       ? STRICT_ALIGNMENT
		       : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		  || (bitpos % BITS_PER_UNIT != 0)))
	  /* If the type and the field are a constant size and the
	     size of the type isn't the same size as the bitfield,
	     we must use bitfield operations.  */
	  || (bitsize >= 0
	      && TYPE_SIZE (TREE_TYPE (exp))
	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	      && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					bitsize)))
	{
	  enum machine_mode ext_mode = mode;

	  if (ext_mode == BLKmode
	      && ! (target != 0 && MEM_P (op0)
		    && MEM_P (target)
		    && bitpos % BITS_PER_UNIT == 0))
	    ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	  if (ext_mode == BLKmode)
	    {
	      if (target == 0)
		target = assign_temp (type, 1, 1);

	      /* In this case, BITPOS must start at a byte boundary and
		 TARGET, if specified, must be a MEM.  */
	      gcc_assert (MEM_P (op0)
			  && (!target || MEM_P (target))
			  && !(bitpos % BITS_PER_UNIT));

	      emit_block_move (target,
			       adjust_address (op0, VOIDmode,
					       bitpos / BITS_PER_UNIT),
			       GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					/ BITS_PER_UNIT),
			       (modifier == EXPAND_STACK_PARM
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      return target;
	    }

	  op0 = validize_mem (op0);

	  if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	  op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
				   (modifier == EXPAND_STACK_PARM
				    ? NULL_RTX : target),
				   ext_mode, ext_mode);

	  /* If the result is a record type and BITSIZE is narrower than
	     the mode of OP0, an integral mode, and this is a big endian
	     machine, we must put the field into the high-order bits.  */
	  if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
	      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
	      && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	    op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				GET_MODE_BITSIZE (GET_MODE (op0))
				- bitsize, op0, 1);

	  /* If the result type is BLKmode, store the data into a temporary
	     of the appropriate type, but with the mode corresponding to the
	     mode for the data we have (op0's mode).  It's tempting to make
	     this a constant type, since we know it's only being stored once,
	     but that can cause problems if we are taking the address of this
	     COMPONENT_REF because the MEM of any reference via that address
	     will have flags corresponding to the type, which will not
	     necessarily be constant.  */
	  if (mode == BLKmode)
	    {
	      rtx new_rtx
		= assign_stack_temp_for_type (ext_mode,
					      GET_MODE_BITSIZE (ext_mode),
					      type);
	      emit_move_insn (new_rtx, op0);
	      op0 = copy_rtx (new_rtx);
	      PUT_MODE (op0, BLKmode);
	    }

	  return op0;
	}

      /* If the result is BLKmode, use that to access the object
	 now as well.  */
      if (mode == BLKmode)
	mode1 = BLKmode;

      /* Get a reference to just this component.  */
      if (modifier == EXPAND_CONST_ADDRESS
	  || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
      else
	op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

      if (op0 == orig_op0)
	op0 = copy_rtx (op0);

      /* If op0 is a temporary because of forcing to memory, pass only the
	 type to set_mem_attributes so that the original expression is never
	 marked as ADDRESSABLE through MEM_EXPR of the temporary.  */
      if (mem_attrs_from_type)
	set_mem_attributes (op0, type, 0);
      else
	set_mem_attributes (op0, exp, 0);

      if (REG_P (XEXP (op0, 0)))
	mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

      MEM_VOLATILE_P (op0) |= volatilep;
      if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	  || modifier == EXPAND_CONST_ADDRESS
	  || modifier == EXPAND_INITIALIZER)
	return op0;
      else if (target == 0)
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      convert_move (target, op0, unsignedp);
      return target;
    }
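
      /* Illustrative example (not exercised by this file itself): for a
	 source-level access such as

	   struct S { unsigned a : 3; unsigned b : 29; } s;
	   unsigned f (void) { return s.b; }

	 the field B is neither byte-aligned nor the same width as its
	 type, so the path above falls back to extract_bit_field, which
	 loads a wider word and isolates the 29 bits with shifts and
	 masks.  */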
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
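
      /* Illustrative example: a declaration like

	   extern void never_call_this (void)
	     __attribute__ ((error ("this call must be optimized away")));

	 is diagnosed by the attribute lookup above if a call to it
	 survives down to RTL expansion; the "warning" attribute is
	 handled the same way but only warns.  */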
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  enum machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp;
	  int volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos,
				   &offset, &mode1, &unsignedp, &volatilep,
				   true);
	  rtx orig_op0;

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr (tem,
			       (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				    != INTEGER_CST)
				&& modifier != EXPAND_STACK_PARM
				? target : NULL_RTX),
			       VOIDmode,
			       (modifier == EXPAND_INITIALIZER
				|| modifier == EXPAND_CONST_ADDRESS
				|| modifier == EXPAND_STACK_PARM)
			       ? modifier : EXPAND_NORMAL);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}

      if (!op0)
	op0 = expand_expr (treeop0,
			   NULL_RTX, VOIDmode, modifier);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && (GET_MODE_PRECISION (mode)
		   == GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is
	 such that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
		   /* If the target does have special handling for unaligned
		      loads of mode then use them.  */
		   && ((icode = optab_handler (movmisalign_optab, mode))
		       != CODE_FOR_nothing))
	    {
	      rtx reg, insn;

	      op0 = adjust_address (op0, mode, 0);
	      /* We've already validated the memory, and we're creating a
		 new pseudo destination.  The predicates really can't
		 fail.  */
	      reg = gen_reg_rtx (mode);

	      /* Nor can the insn generator.  */
	      insn = GEN_FCN (icode) (reg, op0);
	      emit_insn (insn);
	      return reg;
	    }
	  else if (STRICT_ALIGNMENT
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      tree inner_type = TREE_TYPE (treeop0);
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
	      rtx new_rtx
		= assign_stack_temp_for_type (mode, temp_size, type);
	      rtx new_with_op0_mode
		= adjust_address (new_rtx, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (mode)),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new_rtx;
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
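
      /* In short: a same-size reinterpretation (for instance viewing a
	 32-bit float object as a 32-bit integer, which front ends and the
	 folders can express with VIEW_CONVERT_EXPR) is done in registers
	 via gen_lowpart whenever the two modes have equal precision; the
	 stack-temporary spill above is only the fallback when no
	 in-register form exists.  */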
    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label, -1);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       MOVE_NONTEMPORAL (exp));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
	return const0_rtx;
      }
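
      /* Illustrative example of the special case above: with

	   struct S { unsigned a : 1; unsigned b : 1; } s;

	 the statement "s.a |= s.b;" (whose value is not used) is expanded
	 roughly as

	   if (!s.b) goto skip;  s.a = 1;  skip:;

	 instead of a read-modify-write of both one-bit fields.  */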
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);

    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask = immed_double_int_const (double_int_mask (prec),
					 GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
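
/* Worked example: reducing a 32-bit value to a 5-bit field.  For an
   unsigned field the value is simply masked with double_int_mask (5),
   i.e. 0x1f.  For a signed field the value is shifted left by
   32 - 5 = 27 and then shifted right by 27 again, which both truncates
   to 5 bits and sign-extends the result; e.g. 0x13 (binary 10011)
   becomes -13 when read back as a 5-bit signed value.  */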
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
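
/* Worked example of the pattern being recognized: the offset computed for
   "round the address of EXP up to a 64-byte boundary" has the form

     (-(sizetype) &exp) & 63

   i.e. a BIT_AND_EXPR of a NEGATE_EXPR of the address with the constant
   2^6 - 1 (assuming 64 bytes exceeds BIGGEST_ALIGNMENT in bytes).  If
   &exp is 0x1007, the offset is (-0x1007) & 63 = 57, and
   0x1007 + 57 = 0x1040, a multiple of 64.  */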
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (!const_value_known_p (array)
	  || !DECL_INITIAL (array)
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
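
/* Illustrative examples: for ARG == &"hello"[2] (or the equivalent
   "hello" + 2) the function returns the STRING_CST "hello" and sets
   *PTR_OFFSET to 2.  For

     static const char greeting[] = "hi";

   an access through GREETING returns its STRING_CST initializer, again
   with the byte offset in *PTR_OFFSET, provided the initializer fits
   within the declared object.  */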
/* Generate code to calculate OPS, an exploded comparison expression,
   using a store-flag instruction, and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);

      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
    }
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
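
/* Illustrative example of the single-bit shortcut above: for

     int f (int x) { return (x & 8) != 0; }

   fold_single_bit_test rewrites the comparison as roughly (x >> 3) & 1,
   so no store-condition-code instruction is needed; had the test been
   "(x & 8) == 0", the result would additionally be XORed with 1.  */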
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
	    rtx fallback_label ATTRIBUTE_UNUSED)
{
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
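
/* Illustrative example: for a switch whose cases span 3 .. 7, the caller
   passes MINVAL = 3 and RANGE = 4 (largest case minus smallest); the five
   operands built above are then the (possibly truncated) index, that
   lower bound, that range, the label of the dispatch table, and the label
   to jump to when the index is out of range.  */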
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
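
/* The single unsigned comparison above implements the usual range-check
   trick: after subtracting the lower bound, an index below the bound has
   wrapped around to a huge unsigned value, so

     (unsigned) (i - low) > (unsigned) (high - low)

   rejects both i < low and i > high with one branch.  For low = 3,
   high = 7 and i = 1 (with a 32-bit index), i - low wraps to 0xfffffffe,
   which is greater than 4, so the default label is taken.  */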
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
						   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
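
/* Illustrative example: the initializer of

     int __attribute__ ((vector_size (16))) four = { 1, 2, 3, 4 };

   reaches this function as a VECTOR_CST with four integer elements and
   becomes roughly (const_vector:V4SI [1 2 3 4]), each element converted
   with immed_double_int_const since it is neither a REAL_CST nor a
   FIXED_CST.  */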
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
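
/* Example of the name construction above: a C++ front end calling this
   with LANG == "gxx" gets "__gxx_personality_v0" under DWARF unwinding
   and "__gxx_personality_sj0" under setjmp/longjmp unwinding, matching
   the personality routines exported by the language runtime.  */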
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"