1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
46 #include "langhooks.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
56 #include "gimple-ssa.h"
58 #include "tree-ssanames.h"
60 #include "common/common-target.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
77 #define STACK_PUSH_CODE PRE_INC
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
90 /* This structure is used by move_by_pieces to describe the move to
92 struct move_by_pieces_d
101 int explicit_inc_from
;
102 unsigned HOST_WIDE_INT len
;
103 HOST_WIDE_INT offset
;
107 /* This structure is used by store_by_pieces to describe the clear to
110 struct store_by_pieces_d
116 unsigned HOST_WIDE_INT len
;
117 HOST_WIDE_INT offset
;
118 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
);
123 static void move_by_pieces_1 (insn_gen_fn
, machine_mode
,
124 struct move_by_pieces_d
*);
125 static bool block_move_libcall_safe_for_call_parm (void);
126 static bool emit_block_move_via_movmem (rtx
, rtx
, rtx
, unsigned, unsigned, HOST_WIDE_INT
,
127 unsigned HOST_WIDE_INT
, unsigned HOST_WIDE_INT
,
128 unsigned HOST_WIDE_INT
);
129 static tree
emit_block_move_libcall_fn (int);
130 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
131 static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT
, enum machine_mode
);
132 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
133 static void store_by_pieces_1 (struct store_by_pieces_d
*, unsigned int);
134 static void store_by_pieces_2 (insn_gen_fn
, machine_mode
,
135 struct store_by_pieces_d
*);
136 static tree
clear_storage_libcall_fn (int);
137 static rtx_insn
*compress_float_constant (rtx
, rtx
);
138 static rtx
get_subtarget (rtx
);
139 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
140 HOST_WIDE_INT
, enum machine_mode
,
141 tree
, int, alias_set_type
);
142 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
);
143 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
,
144 unsigned HOST_WIDE_INT
, unsigned HOST_WIDE_INT
,
145 enum machine_mode
, tree
, alias_set_type
, bool);
147 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree
, const_tree
);
149 static int is_aligning_offset (const_tree
, const_tree
);
150 static void expand_operands (tree
, tree
, rtx
, rtx
*, rtx
*,
151 enum expand_modifier
);
152 static rtx
reduce_to_bit_field_precision (rtx
, rtx
, tree
);
153 static rtx
do_store_flag (sepops
, rtx
, enum machine_mode
);
155 static void emit_single_push_insn (enum machine_mode
, rtx
, tree
);
157 static void do_tablejump (rtx
, enum machine_mode
, rtx
, rtx
, rtx
, int);
158 static rtx
const_vector_from_tree (tree
);
159 static void write_complex_part (rtx
, rtx
, bool);
161 /* This macro is used to determine whether move_by_pieces should be called
162 to perform a structure copy. */
163 #ifndef MOVE_BY_PIECES_P
164 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
165 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
166 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
169 /* This macro is used to determine whether clear_by_pieces should be
170 called to clear storage. */
171 #ifndef CLEAR_BY_PIECES_P
172 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
173 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
174 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
177 /* This macro is used to determine whether store_by_pieces should be
178 called to "memset" storage with byte values other than zero. */
179 #ifndef SET_BY_PIECES_P
180 #define SET_BY_PIECES_P(SIZE, ALIGN) \
181 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
182 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
185 /* This macro is used to determine whether store_by_pieces should be
186 called to "memcpy" storage when the source is a constant string. */
187 #ifndef STORE_BY_PIECES_P
188 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
189 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
190 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
193 /* This is run to set up which modes can be used
194 directly in memory and to initialize the block move optab. It is run
195 at the beginning of compilation and when the target is reinitialized. */
198 init_expr_target (void)
201 enum machine_mode mode
;
206 /* Try indexing by frame ptr and try by stack ptr.
207 It is known that on the Convex the stack ptr isn't a valid index.
208 With luck, one or the other is valid on any machine. */
209 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
210 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
212 /* A scratch register we can modify in-place below to avoid
213 useless RTL allocations. */
214 reg
= gen_rtx_REG (VOIDmode
, -1);
216 insn
= rtx_alloc (INSN
);
217 pat
= gen_rtx_SET (VOIDmode
, NULL_RTX
, NULL_RTX
);
218 PATTERN (insn
) = pat
;
220 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
221 mode
= (enum machine_mode
) ((int) mode
+ 1))
225 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
226 PUT_MODE (mem
, mode
);
227 PUT_MODE (mem1
, mode
);
228 PUT_MODE (reg
, mode
);
230 /* See if there is some register that can be used in this mode and
231 directly loaded or stored from memory. */
233 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
234 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
235 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
238 if (! HARD_REGNO_MODE_OK (regno
, mode
))
241 SET_REGNO (reg
, regno
);
244 SET_DEST (pat
) = reg
;
245 if (recog (pat
, insn
, &num_clobbers
) >= 0)
246 direct_load
[(int) mode
] = 1;
248 SET_SRC (pat
) = mem1
;
249 SET_DEST (pat
) = reg
;
250 if (recog (pat
, insn
, &num_clobbers
) >= 0)
251 direct_load
[(int) mode
] = 1;
254 SET_DEST (pat
) = mem
;
255 if (recog (pat
, insn
, &num_clobbers
) >= 0)
256 direct_store
[(int) mode
] = 1;
259 SET_DEST (pat
) = mem1
;
260 if (recog (pat
, insn
, &num_clobbers
) >= 0)
261 direct_store
[(int) mode
] = 1;
265 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
267 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
268 mode
= GET_MODE_WIDER_MODE (mode
))
270 enum machine_mode srcmode
;
271 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
272 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
276 ic
= can_extend_p (mode
, srcmode
, 0);
277 if (ic
== CODE_FOR_nothing
)
280 PUT_MODE (mem
, srcmode
);
282 if (insn_operand_matches (ic
, 1, mem
))
283 float_extend_from_mem
[mode
][srcmode
] = true;
288 /* This is run at the start of compiling a function. */
293 memset (&crtl
->expr
, 0, sizeof (crtl
->expr
));
296 /* Copy data from FROM to TO, where the machine modes are not the same.
297 Both modes may be integer, or both may be floating, or both may be
299 UNSIGNEDP should be nonzero if FROM is an unsigned type.
300 This causes zero-extension instead of sign-extension. */
303 convert_move (rtx to
, rtx from
, int unsignedp
)
305 enum machine_mode to_mode
= GET_MODE (to
);
306 enum machine_mode from_mode
= GET_MODE (from
);
307 int to_real
= SCALAR_FLOAT_MODE_P (to_mode
);
308 int from_real
= SCALAR_FLOAT_MODE_P (from_mode
);
312 /* rtx code for making an equivalent value. */
313 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
314 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
317 gcc_assert (to_real
== from_real
);
318 gcc_assert (to_mode
!= BLKmode
);
319 gcc_assert (from_mode
!= BLKmode
);
321 /* If the source and destination are already the same, then there's
326 /* If FROM is a SUBREG that indicates that we have already done at least
327 the required extension, strip it. We don't handle such SUBREGs as
330 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
331 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from
)))
332 >= GET_MODE_PRECISION (to_mode
))
333 && SUBREG_CHECK_PROMOTED_SIGN (from
, unsignedp
))
334 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
336 gcc_assert (GET_CODE (to
) != SUBREG
|| !SUBREG_PROMOTED_VAR_P (to
));
338 if (to_mode
== from_mode
339 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
341 emit_move_insn (to
, from
);
345 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
347 gcc_assert (GET_MODE_BITSIZE (from_mode
) == GET_MODE_BITSIZE (to_mode
));
349 if (VECTOR_MODE_P (to_mode
))
350 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
352 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
354 emit_move_insn (to
, from
);
358 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
360 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
361 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
370 gcc_assert ((GET_MODE_PRECISION (from_mode
)
371 != GET_MODE_PRECISION (to_mode
))
372 || (DECIMAL_FLOAT_MODE_P (from_mode
)
373 != DECIMAL_FLOAT_MODE_P (to_mode
)));
375 if (GET_MODE_PRECISION (from_mode
) == GET_MODE_PRECISION (to_mode
))
376 /* Conversion between decimal float and binary float, same size. */
377 tab
= DECIMAL_FLOAT_MODE_P (from_mode
) ? trunc_optab
: sext_optab
;
378 else if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
383 /* Try converting directly if the insn is supported. */
385 code
= convert_optab_handler (tab
, to_mode
, from_mode
);
386 if (code
!= CODE_FOR_nothing
)
388 emit_unop_insn (code
, to
, from
,
389 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
393 /* Otherwise use a libcall. */
394 libcall
= convert_optab_libfunc (tab
, to_mode
, from_mode
);
396 /* Is this conversion implemented yet? */
397 gcc_assert (libcall
);
400 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
402 insns
= get_insns ();
404 emit_libcall_block (insns
, to
, value
,
405 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
407 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
411 /* Handle pointer conversion. */ /* SPEE 900220. */
412 /* Targets are expected to provide conversion insns between PxImode and
413 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
414 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
416 enum machine_mode full_mode
417 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
419 gcc_assert (convert_optab_handler (trunc_optab
, to_mode
, full_mode
)
420 != CODE_FOR_nothing
);
422 if (full_mode
!= from_mode
)
423 from
= convert_to_mode (full_mode
, from
, unsignedp
);
424 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, full_mode
),
428 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
431 enum machine_mode full_mode
432 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
433 convert_optab ctab
= unsignedp
? zext_optab
: sext_optab
;
434 enum insn_code icode
;
436 icode
= convert_optab_handler (ctab
, full_mode
, from_mode
);
437 gcc_assert (icode
!= CODE_FOR_nothing
);
439 if (to_mode
== full_mode
)
441 emit_unop_insn (icode
, to
, from
, UNKNOWN
);
445 new_from
= gen_reg_rtx (full_mode
);
446 emit_unop_insn (icode
, new_from
, from
, UNKNOWN
);
448 /* else proceed to integer conversions below. */
449 from_mode
= full_mode
;
453 /* Make sure both are fixed-point modes or both are not. */
454 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
) ==
455 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode
));
456 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
))
458 /* If we widen from_mode to to_mode and they are in the same class,
459 we won't saturate the result.
460 Otherwise, always saturate the result to play safe. */
461 if (GET_MODE_CLASS (from_mode
) == GET_MODE_CLASS (to_mode
)
462 && GET_MODE_SIZE (from_mode
) < GET_MODE_SIZE (to_mode
))
463 expand_fixed_convert (to
, from
, 0, 0);
465 expand_fixed_convert (to
, from
, 0, 1);
469 /* Now both modes are integers. */
471 /* Handle expanding beyond a word. */
472 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
)
473 && GET_MODE_PRECISION (to_mode
) > BITS_PER_WORD
)
480 enum machine_mode lowpart_mode
;
481 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
483 /* Try converting directly if the insn is supported. */
484 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
487 /* If FROM is a SUBREG, put it into a register. Do this
488 so that we always generate the same set of insns for
489 better cse'ing; if an intermediate assignment occurred,
490 we won't be doing the operation directly on the SUBREG. */
491 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
492 from
= force_reg (from_mode
, from
);
493 emit_unop_insn (code
, to
, from
, equiv_code
);
496 /* Next, try converting via full word. */
497 else if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
498 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
499 != CODE_FOR_nothing
))
501 rtx word_to
= gen_reg_rtx (word_mode
);
504 if (reg_overlap_mentioned_p (to
, from
))
505 from
= force_reg (from_mode
, from
);
508 convert_move (word_to
, from
, unsignedp
);
509 emit_unop_insn (code
, to
, word_to
, equiv_code
);
513 /* No special multiword conversion insn; do it by hand. */
516 /* Since we will turn this into a no conflict block, we must ensure the
517 the source does not overlap the target so force it into an isolated
518 register when maybe so. Likewise for any MEM input, since the
519 conversion sequence might require several references to it and we
520 must ensure we're getting the same value every time. */
522 if (MEM_P (from
) || reg_overlap_mentioned_p (to
, from
))
523 from
= force_reg (from_mode
, from
);
525 /* Get a copy of FROM widened to a word, if necessary. */
526 if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
)
527 lowpart_mode
= word_mode
;
529 lowpart_mode
= from_mode
;
531 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
533 lowpart
= gen_lowpart (lowpart_mode
, to
);
534 emit_move_insn (lowpart
, lowfrom
);
536 /* Compute the value to put in each remaining word. */
538 fill_value
= const0_rtx
;
540 fill_value
= emit_store_flag_force (gen_reg_rtx (word_mode
),
541 LT
, lowfrom
, const0_rtx
,
542 lowpart_mode
, 0, -1);
544 /* Fill the remaining words. */
545 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
547 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
548 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
550 gcc_assert (subword
);
552 if (fill_value
!= subword
)
553 emit_move_insn (subword
, fill_value
);
556 insns
= get_insns ();
563 /* Truncating multi-word to a word or less. */
564 if (GET_MODE_PRECISION (from_mode
) > BITS_PER_WORD
565 && GET_MODE_PRECISION (to_mode
) <= BITS_PER_WORD
)
568 && ! MEM_VOLATILE_P (from
)
569 && direct_load
[(int) to_mode
]
570 && ! mode_dependent_address_p (XEXP (from
, 0),
571 MEM_ADDR_SPACE (from
)))
573 || GET_CODE (from
) == SUBREG
))
574 from
= force_reg (from_mode
, from
);
575 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
579 /* Now follow all the conversions between integers
580 no more than a word long. */
582 /* For truncation, usually we can just refer to FROM in a narrower mode. */
583 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
584 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, from_mode
))
587 && ! MEM_VOLATILE_P (from
)
588 && direct_load
[(int) to_mode
]
589 && ! mode_dependent_address_p (XEXP (from
, 0),
590 MEM_ADDR_SPACE (from
)))
592 || GET_CODE (from
) == SUBREG
))
593 from
= force_reg (from_mode
, from
);
594 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
595 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
596 from
= copy_to_reg (from
);
597 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
601 /* Handle extension. */
602 if (GET_MODE_PRECISION (to_mode
) > GET_MODE_PRECISION (from_mode
))
604 /* Convert directly if that works. */
605 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
608 emit_unop_insn (code
, to
, from
, equiv_code
);
613 enum machine_mode intermediate
;
617 /* Search for a mode to convert via. */
618 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
619 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
620 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
622 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
623 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, intermediate
)))
624 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
625 != CODE_FOR_nothing
))
627 convert_move (to
, convert_to_mode (intermediate
, from
,
628 unsignedp
), unsignedp
);
632 /* No suitable intermediate mode.
633 Generate what we need with shifts. */
634 shift_amount
= (GET_MODE_PRECISION (to_mode
)
635 - GET_MODE_PRECISION (from_mode
));
636 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
637 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
639 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
642 emit_move_insn (to
, tmp
);
647 /* Support special truncate insns for certain modes. */
648 if (convert_optab_handler (trunc_optab
, to_mode
,
649 from_mode
) != CODE_FOR_nothing
)
651 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, from_mode
),
656 /* Handle truncation of volatile memrefs, and so on;
657 the things that couldn't be truncated directly,
658 and for which there was no special instruction.
660 ??? Code above formerly short-circuited this, for most integer
661 mode pairs, with a force_reg in from_mode followed by a recursive
662 call to this routine. Appears always to have been wrong. */
663 if (GET_MODE_PRECISION (to_mode
) < GET_MODE_PRECISION (from_mode
))
665 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
666 emit_move_insn (to
, temp
);
670 /* Mode combination is not recognized. */
674 /* Return an rtx for a value that would result
675 from converting X to mode MODE.
676 Both X and MODE may be floating, or both integer.
677 UNSIGNEDP is nonzero if X is an unsigned value.
678 This can be done by referring to a part of X in place
679 or by copying to a new temporary with conversion. */
682 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
684 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
687 /* Return an rtx for a value that would result
688 from converting X from mode OLDMODE to mode MODE.
689 Both modes may be floating, or both integer.
690 UNSIGNEDP is nonzero if X is an unsigned value.
692 This can be done by referring to a part of X in place
693 or by copying to a new temporary with conversion.
695 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
698 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
702 /* If FROM is a SUBREG that indicates that we have already done at least
703 the required extension, strip it. */
705 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
706 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
707 && SUBREG_CHECK_PROMOTED_SIGN (x
, unsignedp
))
708 x
= gen_lowpart (mode
, SUBREG_REG (x
));
710 if (GET_MODE (x
) != VOIDmode
)
711 oldmode
= GET_MODE (x
);
716 if (CONST_SCALAR_INT_P (x
) && GET_MODE_CLASS (mode
) == MODE_INT
)
718 /* If the caller did not tell us the old mode, then there is not
719 much to do with respect to canonicalization. We have to
720 assume that all the bits are significant. */
721 if (GET_MODE_CLASS (oldmode
) != MODE_INT
)
722 oldmode
= MAX_MODE_INT
;
723 wide_int w
= wide_int::from (std::make_pair (x
, oldmode
),
724 GET_MODE_PRECISION (mode
),
725 unsignedp
? UNSIGNED
: SIGNED
);
726 return immed_wide_int_const (w
, mode
);
729 /* We can do this with a gen_lowpart if both desired and current modes
730 are integer, and this is either a constant integer, a register, or a
732 if (GET_MODE_CLASS (mode
) == MODE_INT
733 && GET_MODE_CLASS (oldmode
) == MODE_INT
734 && GET_MODE_PRECISION (mode
) <= GET_MODE_PRECISION (oldmode
)
735 && ((MEM_P (x
) && !MEM_VOLATILE_P (x
) && direct_load
[(int) mode
])
737 && (!HARD_REGISTER_P (x
)
738 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
739 && TRULY_NOOP_TRUNCATION_MODES_P (mode
, GET_MODE (x
)))))
741 return gen_lowpart (mode
, x
);
743 /* Converting from integer constant into mode is always equivalent to an
745 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
747 gcc_assert (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (oldmode
));
748 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
751 temp
= gen_reg_rtx (mode
);
752 convert_move (temp
, x
, unsignedp
);
756 /* Return the largest alignment we can use for doing a move (or store)
757 of MAX_PIECES. ALIGN is the largest alignment we could use. */
760 alignment_for_piecewise_move (unsigned int max_pieces
, unsigned int align
)
762 enum machine_mode tmode
;
764 tmode
= mode_for_size (max_pieces
* BITS_PER_UNIT
, MODE_INT
, 1);
765 if (align
>= GET_MODE_ALIGNMENT (tmode
))
766 align
= GET_MODE_ALIGNMENT (tmode
);
769 enum machine_mode tmode
, xmode
;
771 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
773 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
774 if (GET_MODE_SIZE (tmode
) > max_pieces
775 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
778 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
784 /* Return the widest integer mode no wider than SIZE. If no such mode
785 can be found, return VOIDmode. */
787 static enum machine_mode
788 widest_int_mode_for_size (unsigned int size
)
790 enum machine_mode tmode
, mode
= VOIDmode
;
792 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
793 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
794 if (GET_MODE_SIZE (tmode
) < size
)
800 /* STORE_MAX_PIECES is the number of bytes at a time that we can
801 store efficiently. Due to internal GCC limitations, this is
802 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
803 for an immediate constant. */
805 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
807 /* Determine whether the LEN bytes can be moved by using several move
808 instructions. Return nonzero if a call to move_by_pieces should
812 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED
,
813 unsigned int align ATTRIBUTE_UNUSED
)
815 return MOVE_BY_PIECES_P (len
, align
);
818 /* Generate several move instructions to copy LEN bytes from block FROM to
819 block TO. (These are MEM rtx's with BLKmode).
821 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
822 used to push FROM to the stack.
824 ALIGN is maximum stack alignment we can assume.
826 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
827 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
831 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
832 unsigned int align
, int endp
)
834 struct move_by_pieces_d data
;
835 enum machine_mode to_addr_mode
;
836 enum machine_mode from_addr_mode
= get_address_mode (from
);
837 rtx to_addr
, from_addr
= XEXP (from
, 0);
838 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
839 enum insn_code icode
;
841 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
844 data
.from_addr
= from_addr
;
847 to_addr_mode
= get_address_mode (to
);
848 to_addr
= XEXP (to
, 0);
851 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
852 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
854 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
858 to_addr_mode
= VOIDmode
;
862 #ifdef STACK_GROWS_DOWNWARD
868 data
.to_addr
= to_addr
;
871 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
872 || GET_CODE (from_addr
) == POST_INC
873 || GET_CODE (from_addr
) == POST_DEC
);
875 data
.explicit_inc_from
= 0;
876 data
.explicit_inc_to
= 0;
877 if (data
.reverse
) data
.offset
= len
;
880 /* If copying requires more than two move insns,
881 copy addresses to registers (to make displacements shorter)
882 and use post-increment if available. */
883 if (!(data
.autinc_from
&& data
.autinc_to
)
884 && move_by_pieces_ninsns (len
, align
, max_size
) > 2)
886 /* Find the mode of the largest move...
887 MODE might not be used depending on the definitions of the
888 USE_* macros below. */
889 enum machine_mode mode ATTRIBUTE_UNUSED
890 = widest_int_mode_for_size (max_size
);
892 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
894 data
.from_addr
= copy_to_mode_reg (from_addr_mode
,
895 plus_constant (from_addr_mode
,
897 data
.autinc_from
= 1;
898 data
.explicit_inc_from
= -1;
900 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
902 data
.from_addr
= copy_to_mode_reg (from_addr_mode
, from_addr
);
903 data
.autinc_from
= 1;
904 data
.explicit_inc_from
= 1;
906 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
907 data
.from_addr
= copy_to_mode_reg (from_addr_mode
, from_addr
);
908 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
910 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
911 plus_constant (to_addr_mode
,
914 data
.explicit_inc_to
= -1;
916 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
918 data
.to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
920 data
.explicit_inc_to
= 1;
922 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
923 data
.to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
926 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
928 /* First move what we can in the largest integer mode, then go to
929 successively smaller modes. */
931 while (max_size
> 1 && data
.len
> 0)
933 enum machine_mode mode
= widest_int_mode_for_size (max_size
);
935 if (mode
== VOIDmode
)
938 icode
= optab_handler (mov_optab
, mode
);
939 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
940 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
942 max_size
= GET_MODE_SIZE (mode
);
945 /* The code above should have handled everything. */
946 gcc_assert (!data
.len
);
952 gcc_assert (!data
.reverse
);
957 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
958 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
960 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
961 plus_constant (to_addr_mode
,
965 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
972 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
980 /* Return number of insns required to move L bytes by pieces.
981 ALIGN (in bits) is maximum alignment we can assume. */
983 unsigned HOST_WIDE_INT
984 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
,
985 unsigned int max_size
)
987 unsigned HOST_WIDE_INT n_insns
= 0;
989 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
991 while (max_size
> 1 && l
> 0)
993 enum machine_mode mode
;
994 enum insn_code icode
;
996 mode
= widest_int_mode_for_size (max_size
);
998 if (mode
== VOIDmode
)
1001 icode
= optab_handler (mov_optab
, mode
);
1002 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1003 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1005 max_size
= GET_MODE_SIZE (mode
);
1012 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1013 with move instructions for mode MODE. GENFUN is the gen_... function
1014 to make a move insn for that mode. DATA has all the other info. */
1017 move_by_pieces_1 (insn_gen_fn genfun
, machine_mode mode
,
1018 struct move_by_pieces_d
*data
)
1020 unsigned int size
= GET_MODE_SIZE (mode
);
1021 rtx to1
= NULL_RTX
, from1
;
1023 while (data
->len
>= size
)
1026 data
->offset
-= size
;
1030 if (data
->autinc_to
)
1031 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1034 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1037 if (data
->autinc_from
)
1038 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1041 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1043 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1044 emit_insn (gen_add2_insn (data
->to_addr
,
1045 gen_int_mode (-(HOST_WIDE_INT
) size
,
1046 GET_MODE (data
->to_addr
))));
1047 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1048 emit_insn (gen_add2_insn (data
->from_addr
,
1049 gen_int_mode (-(HOST_WIDE_INT
) size
,
1050 GET_MODE (data
->from_addr
))));
1053 emit_insn ((*genfun
) (to1
, from1
));
1056 #ifdef PUSH_ROUNDING
1057 emit_single_push_insn (mode
, from1
, NULL
);
1063 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1064 emit_insn (gen_add2_insn (data
->to_addr
,
1066 GET_MODE (data
->to_addr
))));
1067 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1068 emit_insn (gen_add2_insn (data
->from_addr
,
1070 GET_MODE (data
->from_addr
))));
1072 if (! data
->reverse
)
1073 data
->offset
+= size
;
1079 /* Emit code to move a block Y to a block X. This may be done with
1080 string-move instructions, with multiple scalar move instructions,
1081 or with a library call.
1083 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1084 SIZE is an rtx that says how long they are.
1085 ALIGN is the maximum alignment we can assume they have.
1086 METHOD describes what kind of copy this is, and what mechanisms may be used.
1087 MIN_SIZE is the minimal size of block to move
1088 MAX_SIZE is the maximal size of block to move, if it can not be represented
1089 in unsigned HOST_WIDE_INT, than it is mask of all ones.
1091 Return the address of the new block, if memcpy is called and returns it,
1095 emit_block_move_hints (rtx x
, rtx y
, rtx size
, enum block_op_methods method
,
1096 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1097 unsigned HOST_WIDE_INT min_size
,
1098 unsigned HOST_WIDE_INT max_size
,
1099 unsigned HOST_WIDE_INT probable_max_size
)
1106 if (CONST_INT_P (size
)
1107 && INTVAL (size
) == 0)
1112 case BLOCK_OP_NORMAL
:
1113 case BLOCK_OP_TAILCALL
:
1114 may_use_call
= true;
1117 case BLOCK_OP_CALL_PARM
:
1118 may_use_call
= block_move_libcall_safe_for_call_parm ();
1120 /* Make inhibit_defer_pop nonzero around the library call
1121 to force it to pop the arguments right away. */
1125 case BLOCK_OP_NO_LIBCALL
:
1126 may_use_call
= false;
1133 gcc_assert (MEM_P (x
) && MEM_P (y
));
1134 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1135 gcc_assert (align
>= BITS_PER_UNIT
);
1137 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1138 block copy is more efficient for other large modes, e.g. DCmode. */
1139 x
= adjust_address (x
, BLKmode
, 0);
1140 y
= adjust_address (y
, BLKmode
, 0);
1142 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1143 can be incorrect is coming from __builtin_memcpy. */
1144 if (CONST_INT_P (size
))
1146 x
= shallow_copy_rtx (x
);
1147 y
= shallow_copy_rtx (y
);
1148 set_mem_size (x
, INTVAL (size
));
1149 set_mem_size (y
, INTVAL (size
));
1152 if (CONST_INT_P (size
) && MOVE_BY_PIECES_P (INTVAL (size
), align
))
1153 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1154 else if (emit_block_move_via_movmem (x
, y
, size
, align
,
1155 expected_align
, expected_size
,
1156 min_size
, max_size
, probable_max_size
))
1158 else if (may_use_call
1159 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1160 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y
)))
1162 /* Since x and y are passed to a libcall, mark the corresponding
1163 tree EXPR as addressable. */
1164 tree y_expr
= MEM_EXPR (y
);
1165 tree x_expr
= MEM_EXPR (x
);
1167 mark_addressable (y_expr
);
1169 mark_addressable (x_expr
);
1170 retval
= emit_block_move_via_libcall (x
, y
, size
,
1171 method
== BLOCK_OP_TAILCALL
);
1175 emit_block_move_via_loop (x
, y
, size
, align
);
1177 if (method
== BLOCK_OP_CALL_PARM
)
1184 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1186 unsigned HOST_WIDE_INT max
, min
= 0;
1187 if (GET_CODE (size
) == CONST_INT
)
1188 min
= max
= UINTVAL (size
);
1190 max
= GET_MODE_MASK (GET_MODE (size
));
1191 return emit_block_move_hints (x
, y
, size
, method
, 0, -1,
1195 /* A subroutine of emit_block_move. Returns true if calling the
1196 block move libcall will not clobber any parameters which may have
1197 already been placed on the stack. */
1200 block_move_libcall_safe_for_call_parm (void)
1202 #if defined (REG_PARM_STACK_SPACE)
1206 /* If arguments are pushed on the stack, then they're safe. */
1210 /* If registers go on the stack anyway, any argument is sure to clobber
1211 an outgoing argument. */
1212 #if defined (REG_PARM_STACK_SPACE)
1213 fn
= emit_block_move_libcall_fn (false);
1214 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1215 depend on its argument. */
1217 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn
? NULL_TREE
: TREE_TYPE (fn
)))
1218 && REG_PARM_STACK_SPACE (fn
) != 0)
1222 /* If any argument goes in memory, then it might clobber an outgoing
1225 CUMULATIVE_ARGS args_so_far_v
;
1226 cumulative_args_t args_so_far
;
1229 fn
= emit_block_move_libcall_fn (false);
1230 INIT_CUMULATIVE_ARGS (args_so_far_v
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1231 args_so_far
= pack_cumulative_args (&args_so_far_v
);
1233 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1234 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1236 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1237 rtx tmp
= targetm
.calls
.function_arg (args_so_far
, mode
,
1239 if (!tmp
|| !REG_P (tmp
))
1241 if (targetm
.calls
.arg_partial_bytes (args_so_far
, mode
, NULL
, 1))
1243 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
1250 /* A subroutine of emit_block_move. Expand a movmem pattern;
1251 return true if successful. */
1254 emit_block_move_via_movmem (rtx x
, rtx y
, rtx size
, unsigned int align
,
1255 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1256 unsigned HOST_WIDE_INT min_size
,
1257 unsigned HOST_WIDE_INT max_size
,
1258 unsigned HOST_WIDE_INT probable_max_size
)
1260 int save_volatile_ok
= volatile_ok
;
1261 enum machine_mode mode
;
1263 if (expected_align
< align
)
1264 expected_align
= align
;
1265 if (expected_size
!= -1)
1267 if ((unsigned HOST_WIDE_INT
)expected_size
> probable_max_size
)
1268 expected_size
= probable_max_size
;
1269 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
1270 expected_size
= min_size
;
1273 /* Since this is a move insn, we don't care about volatility. */
1276 /* Try the most limited insn first, because there's no point
1277 including more than one in the machine description unless
1278 the more limited one has some advantage. */
1280 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1281 mode
= GET_MODE_WIDER_MODE (mode
))
1283 enum insn_code code
= direct_optab_handler (movmem_optab
, mode
);
1285 if (code
!= CODE_FOR_nothing
1286 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1287 here because if SIZE is less than the mode mask, as it is
1288 returned by the macro, it will definitely be less than the
1289 actual mode mask. Since SIZE is within the Pmode address
1290 space, we limit MODE to Pmode. */
1291 && ((CONST_INT_P (size
)
1292 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1293 <= (GET_MODE_MASK (mode
) >> 1)))
1294 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
1295 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
1297 struct expand_operand ops
[9];
1300 /* ??? When called via emit_block_move_for_call, it'd be
1301 nice if there were some way to inform the backend, so
1302 that it doesn't fail the expansion because it thinks
1303 emitting the libcall would be more efficient. */
1304 nops
= insn_data
[(int) code
].n_generator_args
;
1305 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
1307 create_fixed_operand (&ops
[0], x
);
1308 create_fixed_operand (&ops
[1], y
);
1309 /* The check above guarantees that this size conversion is valid. */
1310 create_convert_operand_to (&ops
[2], size
, mode
, true);
1311 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
1314 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
1315 create_integer_operand (&ops
[5], expected_size
);
1319 create_integer_operand (&ops
[6], min_size
);
1320 /* If we can not represent the maximal size,
1321 make parameter NULL. */
1322 if ((HOST_WIDE_INT
) max_size
!= -1)
1323 create_integer_operand (&ops
[7], max_size
);
1325 create_fixed_operand (&ops
[7], NULL
);
1329 /* If we can not represent the maximal size,
1330 make parameter NULL. */
1331 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
1332 create_integer_operand (&ops
[8], probable_max_size
);
1334 create_fixed_operand (&ops
[8], NULL
);
1336 if (maybe_expand_insn (code
, nops
, ops
))
1338 volatile_ok
= save_volatile_ok
;
1344 volatile_ok
= save_volatile_ok
;
1348 /* A subroutine of emit_block_move. Expand a call to memcpy.
1349 Return the return value from memcpy, 0 otherwise. */
1352 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
, bool tailcall
)
1354 rtx dst_addr
, src_addr
;
1355 tree call_expr
, fn
, src_tree
, dst_tree
, size_tree
;
1356 enum machine_mode size_mode
;
1359 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1360 pseudos. We can then place those new pseudos into a VAR_DECL and
1363 dst_addr
= copy_addr_to_reg (XEXP (dst
, 0));
1364 src_addr
= copy_addr_to_reg (XEXP (src
, 0));
1366 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1367 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1369 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1370 src_tree
= make_tree (ptr_type_node
, src_addr
);
1372 size_mode
= TYPE_MODE (sizetype
);
1374 size
= convert_to_mode (size_mode
, size
, 1);
1375 size
= copy_to_mode_reg (size_mode
, size
);
1377 /* It is incorrect to use the libcall calling conventions to call
1378 memcpy in this context. This could be a user call to memcpy and
1379 the user may wish to examine the return value from memcpy. For
1380 targets where libcalls and normal calls have different conventions
1381 for returning pointers, we could end up generating incorrect code. */
1383 size_tree
= make_tree (sizetype
, size
);
1385 fn
= emit_block_move_libcall_fn (true);
1386 call_expr
= build_call_expr (fn
, 3, dst_tree
, src_tree
, size_tree
);
1387 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
1389 retval
= expand_normal (call_expr
);
1394 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1395 for the function we use for block copies. */
1397 static GTY(()) tree block_move_fn
;
1400 init_block_move_fn (const char *asmspec
)
1404 tree args
, fn
, attrs
, attr_args
;
1406 fn
= get_identifier ("memcpy");
1407 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1408 const_ptr_type_node
, sizetype
,
1411 fn
= build_decl (UNKNOWN_LOCATION
, FUNCTION_DECL
, fn
, args
);
1412 DECL_EXTERNAL (fn
) = 1;
1413 TREE_PUBLIC (fn
) = 1;
1414 DECL_ARTIFICIAL (fn
) = 1;
1415 TREE_NOTHROW (fn
) = 1;
1416 DECL_VISIBILITY (fn
) = VISIBILITY_DEFAULT
;
1417 DECL_VISIBILITY_SPECIFIED (fn
) = 1;
1419 attr_args
= build_tree_list (NULL_TREE
, build_string (1, "1"));
1420 attrs
= tree_cons (get_identifier ("fn spec"), attr_args
, NULL
);
1422 decl_attributes (&fn
, attrs
, ATTR_FLAG_BUILT_IN
);
1428 set_user_assembler_name (block_move_fn
, asmspec
);
1432 emit_block_move_libcall_fn (int for_call
)
1434 static bool emitted_extern
;
1437 init_block_move_fn (NULL
);
1439 if (for_call
&& !emitted_extern
)
1441 emitted_extern
= true;
1442 make_decl_rtl (block_move_fn
);
1445 return block_move_fn
;
1448 /* A subroutine of emit_block_move. Copy the data via an explicit
1449 loop. This is used only when libcalls are forbidden. */
1450 /* ??? It'd be nice to copy in hunks larger than QImode. */
1453 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1454 unsigned int align ATTRIBUTE_UNUSED
)
1456 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1457 enum machine_mode x_addr_mode
= get_address_mode (x
);
1458 enum machine_mode y_addr_mode
= get_address_mode (y
);
1459 enum machine_mode iter_mode
;
1461 iter_mode
= GET_MODE (size
);
1462 if (iter_mode
== VOIDmode
)
1463 iter_mode
= word_mode
;
1465 top_label
= gen_label_rtx ();
1466 cmp_label
= gen_label_rtx ();
1467 iter
= gen_reg_rtx (iter_mode
);
1469 emit_move_insn (iter
, const0_rtx
);
1471 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1472 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1473 do_pending_stack_adjust ();
1475 emit_jump (cmp_label
);
1476 emit_label (top_label
);
1478 tmp
= convert_modes (x_addr_mode
, iter_mode
, iter
, true);
1479 x_addr
= simplify_gen_binary (PLUS
, x_addr_mode
, x_addr
, tmp
);
1481 if (x_addr_mode
!= y_addr_mode
)
1482 tmp
= convert_modes (y_addr_mode
, iter_mode
, iter
, true);
1483 y_addr
= simplify_gen_binary (PLUS
, y_addr_mode
, y_addr
, tmp
);
1485 x
= change_address (x
, QImode
, x_addr
);
1486 y
= change_address (y
, QImode
, y_addr
);
1488 emit_move_insn (x
, y
);
1490 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1491 true, OPTAB_LIB_WIDEN
);
1493 emit_move_insn (iter
, tmp
);
1495 emit_label (cmp_label
);
1497 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1498 true, top_label
, REG_BR_PROB_BASE
* 90 / 100);
1501 /* Copy all or part of a value X into registers starting at REGNO.
1502 The number of registers to be filled is NREGS. */
1505 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1508 #ifdef HAVE_load_multiple
1516 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
1517 x
= validize_mem (force_const_mem (mode
, x
));
1519 /* See if the machine can do this with a load multiple insn. */
1520 #ifdef HAVE_load_multiple
1521 if (HAVE_load_multiple
)
1523 last
= get_last_insn ();
1524 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1532 delete_insns_since (last
);
1536 for (i
= 0; i
< nregs
; i
++)
1537 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1538 operand_subword_force (x
, i
, mode
));
1541 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1542 The number of registers to be filled is NREGS. */
1545 move_block_from_reg (int regno
, rtx x
, int nregs
)
1552 /* See if the machine can do this with a store multiple insn. */
1553 #ifdef HAVE_store_multiple
1554 if (HAVE_store_multiple
)
1556 rtx last
= get_last_insn ();
1557 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1565 delete_insns_since (last
);
1569 for (i
= 0; i
< nregs
; i
++)
1571 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1575 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1579 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1580 ORIG, where ORIG is a non-consecutive group of registers represented by
1581 a PARALLEL. The clone is identical to the original except in that the
1582 original set of registers is replaced by a new set of pseudo registers.
1583 The new set has the same modes as the original set. */
1586 gen_group_rtx (rtx orig
)
1591 gcc_assert (GET_CODE (orig
) == PARALLEL
);
1593 length
= XVECLEN (orig
, 0);
1594 tmps
= XALLOCAVEC (rtx
, length
);
1596 /* Skip a NULL entry in first slot. */
1597 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1602 for (; i
< length
; i
++)
1604 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1605 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1607 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1610 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1613 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1614 except that values are placed in TMPS[i], and must later be moved
1615 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1618 emit_group_load_1 (rtx
*tmps
, rtx dst
, rtx orig_src
, tree type
, int ssize
)
1622 enum machine_mode m
= GET_MODE (orig_src
);
1624 gcc_assert (GET_CODE (dst
) == PARALLEL
);
1627 && !SCALAR_INT_MODE_P (m
)
1628 && !MEM_P (orig_src
)
1629 && GET_CODE (orig_src
) != CONCAT
)
1631 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_src
));
1632 if (imode
== BLKmode
)
1633 src
= assign_stack_temp (GET_MODE (orig_src
), ssize
);
1635 src
= gen_reg_rtx (imode
);
1636 if (imode
!= BLKmode
)
1637 src
= gen_lowpart (GET_MODE (orig_src
), src
);
1638 emit_move_insn (src
, orig_src
);
1639 /* ...and back again. */
1640 if (imode
!= BLKmode
)
1641 src
= gen_lowpart (imode
, src
);
1642 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1646 /* Check for a NULL entry, used to indicate that the parameter goes
1647 both on the stack and in registers. */
1648 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1653 /* Process the pieces. */
1654 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1656 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1657 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1658 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1661 /* Handle trailing fragments that run over the size of the struct. */
1662 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1664 /* Arrange to shift the fragment to where it belongs.
1665 extract_bit_field loads to the lsb of the reg. */
1667 #ifdef BLOCK_REG_PADDING
1668 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1669 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1674 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1675 bytelen
= ssize
- bytepos
;
1676 gcc_assert (bytelen
> 0);
1679 /* If we won't be loading directly from memory, protect the real source
1680 from strange tricks we might play; but make sure that the source can
1681 be loaded directly into the destination. */
1683 if (!MEM_P (orig_src
)
1684 && (!CONSTANT_P (orig_src
)
1685 || (GET_MODE (orig_src
) != mode
1686 && GET_MODE (orig_src
) != VOIDmode
)))
1688 if (GET_MODE (orig_src
) == VOIDmode
)
1689 src
= gen_reg_rtx (mode
);
1691 src
= gen_reg_rtx (GET_MODE (orig_src
));
1693 emit_move_insn (src
, orig_src
);
1696 /* Optimize the access just a bit. */
1698 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1699 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1700 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1701 && bytelen
== GET_MODE_SIZE (mode
))
1703 tmps
[i
] = gen_reg_rtx (mode
);
1704 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1706 else if (COMPLEX_MODE_P (mode
)
1707 && GET_MODE (src
) == mode
1708 && bytelen
== GET_MODE_SIZE (mode
))
1709 /* Let emit_move_complex do the bulk of the work. */
1711 else if (GET_CODE (src
) == CONCAT
)
1713 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1714 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1716 if ((bytepos
== 0 && bytelen
== slen0
)
1717 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1719 /* The following assumes that the concatenated objects all
1720 have the same size. In this case, a simple calculation
1721 can be used to determine the object and the bit field
1723 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1724 if (! CONSTANT_P (tmps
[i
])
1725 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
1726 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1727 (bytepos
% slen0
) * BITS_PER_UNIT
,
1728 1, NULL_RTX
, mode
, mode
);
1734 gcc_assert (!bytepos
);
1735 mem
= assign_stack_temp (GET_MODE (src
), slen
);
1736 emit_move_insn (mem
, src
);
1737 tmps
[i
] = extract_bit_field (mem
, bytelen
* BITS_PER_UNIT
,
1738 0, 1, NULL_RTX
, mode
, mode
);
1741 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1742 SIMD register, which is currently broken. While we get GCC
1743 to emit proper RTL for these cases, let's dump to memory. */
1744 else if (VECTOR_MODE_P (GET_MODE (dst
))
1747 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1750 mem
= assign_stack_temp (GET_MODE (src
), slen
);
1751 emit_move_insn (mem
, src
);
1752 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1754 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1755 && XVECLEN (dst
, 0) > 1)
1756 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE (dst
), bytepos
);
1757 else if (CONSTANT_P (src
))
1759 HOST_WIDE_INT len
= (HOST_WIDE_INT
) bytelen
;
1767 /* TODO: const_wide_int can have sizes other than this... */
1768 gcc_assert (2 * len
== ssize
);
1769 split_double (src
, &first
, &second
);
1776 else if (REG_P (src
) && GET_MODE (src
) == mode
)
1779 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1780 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1784 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
1789 /* Emit code to move a block SRC of type TYPE to a block DST,
1790 where DST is non-consecutive registers represented by a PARALLEL.
1791 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1795 emit_group_load (rtx dst
, rtx src
, tree type
, int ssize
)
1800 tmps
= XALLOCAVEC (rtx
, XVECLEN (dst
, 0));
1801 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1803 /* Copy the extracted pieces into the proper (probable) hard regs. */
1804 for (i
= 0; i
< XVECLEN (dst
, 0); i
++)
1806 rtx d
= XEXP (XVECEXP (dst
, 0, i
), 0);
1809 emit_move_insn (d
, tmps
[i
]);
1813 /* Similar, but load SRC into new pseudos in a format that looks like
1814 PARALLEL. This can later be fed to emit_group_move to get things
1815 in the right place. */
1818 emit_group_load_into_temps (rtx parallel
, rtx src
, tree type
, int ssize
)
1823 vec
= rtvec_alloc (XVECLEN (parallel
, 0));
1824 emit_group_load_1 (&RTVEC_ELT (vec
, 0), parallel
, src
, type
, ssize
);
1826 /* Convert the vector to look just like the original PARALLEL, except
1827 with the computed values. */
1828 for (i
= 0; i
< XVECLEN (parallel
, 0); i
++)
1830 rtx e
= XVECEXP (parallel
, 0, i
);
1831 rtx d
= XEXP (e
, 0);
1835 d
= force_reg (GET_MODE (d
), RTVEC_ELT (vec
, i
));
1836 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), d
, XEXP (e
, 1));
1838 RTVEC_ELT (vec
, i
) = e
;
1841 return gen_rtx_PARALLEL (GET_MODE (parallel
), vec
);
1844 /* Emit code to move a block SRC to block DST, where SRC and DST are
1845 non-consecutive groups of registers, each represented by a PARALLEL. */
1848 emit_group_move (rtx dst
, rtx src
)
1852 gcc_assert (GET_CODE (src
) == PARALLEL
1853 && GET_CODE (dst
) == PARALLEL
1854 && XVECLEN (src
, 0) == XVECLEN (dst
, 0));
1856 /* Skip first entry if NULL. */
1857 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1858 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1859 XEXP (XVECEXP (src
, 0, i
), 0));
1862 /* Move a group of registers represented by a PARALLEL into pseudos. */
1865 emit_group_move_into_temps (rtx src
)
1867 rtvec vec
= rtvec_alloc (XVECLEN (src
, 0));
1870 for (i
= 0; i
< XVECLEN (src
, 0); i
++)
1872 rtx e
= XVECEXP (src
, 0, i
);
1873 rtx d
= XEXP (e
, 0);
1876 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), copy_to_reg (d
), XEXP (e
, 1));
1877 RTVEC_ELT (vec
, i
) = e
;
1880 return gen_rtx_PARALLEL (GET_MODE (src
), vec
);
1883 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1884 where SRC is non-consecutive registers represented by a PARALLEL.
1885 SSIZE represents the total size of block ORIG_DST, or -1 if not
1889 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1892 int start
, finish
, i
;
1893 enum machine_mode m
= GET_MODE (orig_dst
);
1895 gcc_assert (GET_CODE (src
) == PARALLEL
);
1897 if (!SCALAR_INT_MODE_P (m
)
1898 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
1900 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_dst
));
1901 if (imode
== BLKmode
)
1902 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
);
1904 dst
= gen_reg_rtx (imode
);
1905 emit_group_store (dst
, src
, type
, ssize
);
1906 if (imode
!= BLKmode
)
1907 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
1908 emit_move_insn (orig_dst
, dst
);
1912 /* Check for a NULL entry, used to indicate that the parameter goes
1913 both on the stack and in registers. */
1914 if (XEXP (XVECEXP (src
, 0, 0), 0))
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              enum machine_mode dest_mode = GET_MODE (dest);
              enum machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest, tmp_mode, bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
                           VOIDmode, tmps[i]);
        }

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
               && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
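/* Illustration (editor's sketch, not part of the original sources): for a
   16-byte struct returned in two DImode hard registers, SRC would be a
   PARALLEL of (register, byte-offset) pairs, roughly

       (parallel [(expr_list (reg:DI 0) (const_int 0))
                  (expr_list (reg:DI 1) (const_int 8))])

   and a caller could spill it to a stack temporary with

       rtx mem = assign_stack_temp (BLKmode, 16);
       emit_group_store (mem, src, struct_type, 16);

   where struct_type stands for the 16-byte struct's tree type.  The hard
   regs are first copied into pseudos, then each piece is stored at its
   recorded byte offset in MEM.  */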
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  enum machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode mode = GET_MODE (srcreg);
  enum machine_mode tmode = GET_MODE (target);
  enum machine_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
           && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
               || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
           && GET_MODE (target) == mode
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
        copy_mode = mem_mode;
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
        dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, copy_mode, copy_mode));
    }
}
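/* Worked example (editor's note): on a 64-bit big-endian target
   (BITS_PER_WORD == 64) returning a 5-byte struct in a register,
   bytes % UNITS_PER_WORD == 5, so

       padding_correction = 64 - 5 * 8 = 24

   and the copy loop starts extracting at bit 24 of the source word,
   skipping the left padding, while storing from bit 0 of the target.  */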
/* Copy BLKmode value SRC into a register of mode MODE.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (enum machine_mode mode, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  enum machine_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                           * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
         on a word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        {
          /* Generate an appropriate register.  */
          dst_word = gen_reg_rtx (word_mode);
          dst_words[xbitpos / BITS_PER_WORD] = dst_word;

          /* Clear the destination before we move anything into it.  */
          emit_move_insn (dst_word, CONST0_RTX (word_mode));
        }

      /* We need a new source operand each time bitpos is on a word
         boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
         xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
                       0, 0, word_mode,
                       extract_bit_field (src_word, bitsize,
                                          bitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
         entire structure.  */
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        /* Have we found a large enough mode?  */
        if (GET_MODE_SIZE (mode) >= bytes)
          break;

      /* A suitable mode should have been found.  */
      gcc_assert (mode != VOIDmode);
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
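/* Illustration (editor's sketch): after expanding a call whose argument
   lives in hard regs 4 and 5, the fusage list built here is a chain like

       (expr_list (use (reg:DI 4))
          (expr_list (use (reg:DI 5)) ...))

   which is attached to the CALL_INSN so that data-flow passes see the
   argument registers as used by the call.  */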
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}
#ifdef HAVE_conditional_move
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
#endif
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (! (memsetp
         ? SET_BY_PIECES_P (len, align)
         : STORE_BY_PIECES_P (len, align)))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
        {
          mode = widest_int_mode_for_size (max_size);

          if (mode == VOIDmode)
            break;

          icode = optab_handler (mov_optab, mode);
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!targetm.legitimate_constant_p (mode, cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
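/* Usage sketch (editor's note): a memset-style caller probes before
   committing, along the lines of

       if (can_store_by_pieces (len, builtin_memset_gen_str,
                                val_rtx, align, true))
         store_by_pieces (dest_mem, len, builtin_memset_gen_str,
                          val_rtx, align, true, 0);

   builtin_memset_gen_str here refers to the constfun callback that
   builtins.c passes when expanding memset; any function matching the
   constfun signature would do, and val_rtx is an assumed name for the
   caller's fill-value data.  */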
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return TO; if ENDP is 1 return memory at the end ala
   mempcpy; and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  enum machine_mode to_addr_mode = get_address_mode (to);
  struct store_by_pieces_d data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (memsetp
              ? SET_BY_PIECES_P (len, align)
              : STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
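/* Example of the ENDP convention (editor's note): when storing 5 bytes
   starting at DEST, ENDP == 0 returns DEST itself, ENDP == 1 returns
   memory at DEST + 5 (mempcpy style), and ENDP == 2 returns memory at
   DEST + 4, the last byte written (stpcpy style).  */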
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  enum machine_mode to_addr_mode = get_address_mode (data->to);
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_to_mode_reg (to_addr_mode,
                                            plus_constant (to_addr_mode,
                                                           to_addr,
                                                           data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data->len > 0)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
                   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
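/* Worked example (editor's note): with STORE_MAX_PIECES == 8 and LEN == 7
   on a target with all the usual integer move patterns, store_by_pieces_1
   first tries DImode (8 bytes, too big for 7), then SImode stores 4 bytes
   here, HImode stores 2 more, and QImode stores the final byte: three
   move insns in all.  */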
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size,
                     unsigned HOST_WIDE_INT min_size,
                     unsigned HOST_WIDE_INT max_size,
                     unsigned HOST_WIDE_INT probable_max_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size,
                                   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
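/* Usage sketch (editor's note): zeroing a 32-byte BLKmode temporary

       rtx obj = assign_stack_temp (BLKmode, 32);
       clear_storage (obj, GEN_INT (32), BLOCK_OP_NORMAL);

   picks clear_by_pieces, a setmem pattern, or a memset libcall, in that
   order of preference, based on size, alignment and target support.  */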
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into new pseudos into a VAR_DECL and use them later.  */

  object = copy_addr_to_reg (XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
                        unsigned int expected_align, HOST_WIDE_INT expected_size,
                        unsigned HOST_WIDE_INT min_size,
                        unsigned HOST_WIDE_INT max_size,
                        unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT) expected_size > max_size)
        expected_size = max_size;
      if ((unsigned HOST_WIDE_INT) expected_size < min_size)
        expected_size = min_size;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], object);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[1], size, mode, true);
          create_convert_operand_from (&ops[2], val, byte_mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we cannot represent the maximal size,
                 make the parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we cannot represent the maximal size,
                 make the parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            return true;
        }
    }

  return false;
}
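/* Operand layout (editor's summary): a 9-operand setmem expander sees
   ops[0] = destination MEM, ops[1] = length, ops[2] = fill value,
   ops[3] = alignment in bytes, ops[4] and ops[5] = expected alignment
   and size hints, ops[6] through ops[8] = min, max and probable-max
   size hints.  Patterns with 4, 6 or 8 operands simply receive the
   leading subset.  */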
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
}
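/* Illustration (editor's sketch): initializing an SCmode value C from
   SFmode parts R and I

       write_complex_part (c, r, false);
       write_complex_part (c, i, true);

   For a MEM the imaginary part lands at byte offset
   GET_MODE_SIZE (SFmode); for a CONCAT it is simply XEXP (c, 1).  */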
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode);
}
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
                       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              gen_int_mode (adjust, Pmode), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
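/* Worked example (editor's note): on a downward-growing stack, a push
   written as (mem:SI (pre_dec (reg sp))) becomes an explicit

       sp = sp + (-4)

   followed by a plain (mem:SI (reg sp)) store.  For the POST_ variants
   the returned MEM instead addresses the slot the auto-modification
   would have written, i.e. the stack pointer value from before the
   adjustment.  */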
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;

    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;

    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
           && HARD_REGISTER_P (x)
           && hard_regno_nregs[REGNO (x)][mode] == 1)
      && !(REG_P (y)
           && HARD_REGISTER_P (y)
           && hard_regno_nregs[REGNO (y)][mode] == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx_insn *ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
                        - GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }
  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;
  return false;
}
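/* Example (editor's note): for (subreg:TI (reg:DI x) 0) on a 64-bit
   little-endian target, word 1 of the TImode subreg lies past the end
   of the DImode source (offset 8 >= 8 bytes), so this predicate returns
   true for I == 1 and emit_move_multi_word skips emitting a move for
   that word.  */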
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx_insn *
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx_insn *last_insn = 0;
  rtx_insn *seq;
  rtx inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
         from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
        continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
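/* Illustration (editor's note): a DImode move on a 32-bit target without
   a movdi pattern decomposes into two SImode moves, one per word, with a
   leading (clobber (reg X)) when X involves a SUBREG so that the
   lifetime of the underlying pseudo stays trackable.  */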
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
        {
          if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
            return ret;
        }
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx_insn *
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx_insn *last_insn;
  rtx set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                         MEM_ADDR_SPACE (x))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
                                        MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx_insn *
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y;
      rtx_insn *last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (!insn_operand_matches (ic, 1, trunc_y))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  speed);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  speed);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      /* For CSE's benefit, force the compressed constant pool entry
         into a new pseudo.  This constant may be used in different modes,
         and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
         so that e.g. stack realignment code is aware of it.  */
      rtx target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
        target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target != x)
        return emit_move_insn (x, target);
      return last_insn;
    }

  return NULL;
}
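/* Worked example (editor's note): for

       (set (reg:DF d) (const_double:DF 1.5))

   1.5 is exactly representable in SFmode, so on a target with a cheap
   extendsfdf2 this becomes a float_extend of the SFmode constant (or of
   its constant-pool slot), halving the pool entry from 8 bytes to 4.  */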
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp,
                             gen_int_mode (extra, Pmode),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
        temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (Pmode, size,
                                                               extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}
/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is INT_MIN.  */

HOST_WIDE_INT
find_args_size_adjust (rtx insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
         of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
        return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
         The call itself is always first, and the stack adjust is
         usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;
        }

      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
        return 0;

      /* Fall through to process the extracted SET and DEST
         as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
         and a store for a PUSH_ROUNDING pattern, rather than a
         PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;

          /* We do not expect an auto-inc of the sp in the parallel.  */
          gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      if (i < 0)
        return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
         the stack pointer in V4SImode.  Consider that non-trivial.  */
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
          && GET_CODE (SET_SRC (set)) == PLUS
          && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
          && CONST_INT_P (XEXP (SET_SRC (set), 1)))
        return INTVAL (XEXP (SET_SRC (set), 1));
      /* ??? Reload can generate no-op moves, which will be cleaned
         up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
        return 0;
      else
        return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
        {
          mem = dest;
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
        mem = SET_SRC (set);
      else
        return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
        {
        case PRE_INC:
        case POST_INC:
          return GET_MODE_SIZE (GET_MODE (mem));
        case PRE_DEC:
        case POST_DEC:
          return -GET_MODE_SIZE (GET_MODE (mem));
        case PRE_MODIFY:
        case POST_MODIFY:
          addr = XEXP (addr, 1);
          gcc_assert (GET_CODE (addr) == PLUS);
          gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
          gcc_assert (CONST_INT_P (XEXP (addr, 1)));
          return INTVAL (XEXP (addr, 1));
        default:
          gcc_unreachable ();
        }
    }
}
int
fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
        {
          if (!CALL_P (insn)
              || ACCUMULATE_OUTGOING_ARGS
              || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
            continue;
        }

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
        saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
#ifdef STACK_GROWS_DOWNWARD
      this_delta = -(unsigned HOST_WIDE_INT) this_delta;
#endif
      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}
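/* Illustration (editor's note): after two SImode pushes on a
   STACK_GROWS_DOWNWARD target with end_args_size == 8, the backward scan
   annotates the second push with REG_ARGS_SIZE 8 and the first with
   REG_ARGS_SIZE 4, so every note records the args-size in effect after
   its insn executes.  */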
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
        return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     such a capability.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    gen_int_mode (rounded_size, Pmode),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                gen_int_mode (offset, Pmode));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                gen_int_mode (-(HOST_WIDE_INT) rounded_size,
                                              Pmode));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                gen_int_mode (rounded_size, Pmode));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
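/* Worked example (editor's note): pushing an HImode value where
   PUSH_ROUNDING rounds 2 bytes up to 4 with downward padding on a
   downward-growing stack: the stack pointer is dropped by 4 first, then
   the value is stored at sp + 2 (padding_size == 4 - 2 == 2), so the
   datum sits at the high end of its rounded slot.  */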
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  int delta, old_delta = stack_pointer_delta;
  rtx prev = get_last_insn ();
  rtx last;

  emit_single_push_insn_1 (mode, x, type);

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
  else
    {
      delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
      gcc_assert (delta == INT_MIN || delta == old_delta);
    }
}
#endif
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, int extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
        {
          /* A value is to be stored in an insufficiently aligned
             stack slot; copy via a suitably aligned slot if
             necessary.  */
          size = GEN_INT (GET_MODE_SIZE (mode));
          if (!MEM_P (xinner))
            {
              temp = assign_temp (type, 1, 1);
              emit_move_insn (temp, xinner);
              xinner = temp;
            }
        }

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && CONST_INT_P (size)
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (CONST_INT_P (size))
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     gen_int_mode (used, GET_MODE (size)),
                                     NULL_RTX, 0, OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (CONST_INT_P (args_so_far))
            temp = memory_address (BLKmode,
                                   plus_constant (Pmode, args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (Pmode,
                                                  gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
      for (i = size - 1; i >= not_stack; i--)
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (CONST_INT_P (args_so_far))
            addr
              = memory_address (mode,
                                plus_constant (Pmode, args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (dest, align);

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        {
          gcc_assert (partial % UNITS_PER_WORD == 0);
          move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
        }
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
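/* Worked example for the "scalar partly in registers" case above
   (illustrative numbers, not from the original sources): with
   UNITS_PER_WORD == 4, a 16-byte scalar and partial == 8, SIZE is 4
   words and NOT_STACK is 2, so the loop pushes words 3 and 2 while
   words 1 and 0 are copied into registers by the partial/reg code at
   the end of the function.  */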
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
          || x == 0
          /* Only registers can be subtargets.  */
          || !REG_P (x)
          /* Don't use hard regs to avoid extending their life.  */
          || REGNO (x) < FIRST_PSEUDO_REGISTER
          ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
                                 unsigned HOST_WIDE_INT bitpos,
                                 unsigned HOST_WIDE_INT bitregion_start,
                                 unsigned HOST_WIDE_INT bitregion_end,
                                 enum machine_mode mode1, rtx str_rtx,
                                 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple srcstmt;
  enum tree_code code;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
         should be a bitfield.  */
      if (!op0stmt
          || !is_gimple_assign (op0stmt)
          || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
        return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }
  else
    return false;

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
        str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
                                bitregion_start, bitregion_end,
                                MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
        return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
         where we don't need to do any masking and also
         1 bit bitfields where xor can be used.
         We might win by one instruction for the other bitfields
         too if insv/extv instructions aren't used, so that
         can be added later.  */
      if (bitpos + bitsize != str_bitsize
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
        break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = code == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
        {
          value = expand_and (str_mode, value, const1_rtx, NULL);
          binop = xor_optab;
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
        {
          rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
                                   str_mode);
          value = expand_and (str_mode, value, mask, NULL_RTX);
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
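/* Illustrative example (not from the original sources): for a field
   "unsigned f : 1;" placed at bit 3 of a 32-bit word, an assignment
   like "s.f ^= 1" matches the BIT_XOR_EXPR case above and becomes a
   single word-mode XOR of str_rtx with the constant 1 << 3 (after the
   BYTES_BIG_ENDIAN correction of bitpos), instead of an
   extract/modify/insert sequence.  */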
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */

static void
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
               unsigned HOST_WIDE_INT *bitend,
               tree exp,
               HOST_WIDE_INT *bitpos,
               tree *offset)
{
  HOST_WIDE_INT bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      enum machine_mode rmode;
      HOST_WIDE_INT rbitsize, rbitpos;
      tree roffset;
      int unsignedp;
      int volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
                           &roffset, &rmode, &unsignedp, &volatilep, false);
      if ((rbitpos % BITS_PER_UNIT) != 0)
        {
          *bitstart = *bitend = 0;
          return;
        }
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
    bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
                 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (bitoffset > *bitpos)
    {
      HOST_WIDE_INT adjust = bitoffset - *bitpos;
      gcc_assert ((adjust % BITS_PER_UNIT) == 0);
      *bitpos += adjust;
      if (*offset == NULL_TREE)
        *offset = size_int (-adjust / BITS_PER_UNIT);
      else
        *offset
          = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
      *bitstart = 0;
    }
  else
    *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
}
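/* Illustrative example (assuming a typical layout, not from the
   original sources): in "struct { char c; int a : 3; int b : 5; }",
   a and b share one byte-sized representative starting at bit 8.  For
   an access to b, *bitpos comes in as 11 and bitoffset computes to 3,
   so *bitstart becomes 8 and *bitend 15: stores to b may touch bits
   8-15 but nothing outside that byte.  */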
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
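/* For instance (illustrative): after folding, an access such as
   "*(int *) &x", where x is a non-addressable local that ends up in a
   register, is represented as a MEM_REF of &x; the predicate above
   lets the expanders treat it as a register operation rather than a
   memory store.  */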
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  enum machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Handle misaligned stores.  */
  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && !mem_ref_refers_to_non_mem_p (to)
      && ((align = get_object_alignment (to))
          < GET_MODE_ALIGNMENT (mode))
      && (((icode = optab_handler (movmisalign_optab, mode))
           != CODE_FOR_nothing)
          || SLOW_UNALIGNED_ACCESS (mode, align)))
    {
      rtx reg, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);
      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      if (icode != CODE_FOR_nothing)
        {
          struct expand_operand ops[2];

          create_fixed_operand (&ops[0], mem);
          create_input_operand (&ops[1], reg, mode);
          /* The movmisalign<mode> pattern cannot fail, else the assignment
             would silently be omitted.  */
          expand_insn (icode, 2, ops);
        }
      else
        store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
      return;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  Same for (partially) storing into a non-memory object.  */
  if (handled_component_p (to)
      || (TREE_CODE (to) == MEM_REF
          && mem_ref_refers_to_non_mem_p (to))
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      unsigned HOST_WIDE_INT bitregion_start = 0;
      unsigned HOST_WIDE_INT bitregion_end = 0;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep, true);

      /* Make sure bitpos is not negative, it can wreak havoc later.  */
      if (bitpos < 0)
        {
          gcc_assert (offset == NULL_TREE);
          offset = size_int (bitpos >> (BITS_PER_UNIT == 8
                                        ? 3 : exact_log2 (BITS_PER_UNIT)));
          bitpos &= BITS_PER_UNIT - 1;
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
        get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
      /* The C++ memory model naturally applies to byte-aligned fields.
         However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
         BITSIZE are not byte-aligned, there is no need to limit the range
         we can access.  This can occur with packed structures in Ada.  */
      else if (bitsize > 0
               && bitsize % BITS_PER_UNIT == 0
               && bitpos % BITS_PER_UNIT == 0)
        {
          bitregion_start = bitpos;
          bitregion_end = bitpos + bitsize - 1;
        }

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* If the field has a mode, we want to access it in the
         field's mode, not the computed mode.
         If a MEM has VOIDmode (external with incomplete type),
         use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
        {
          if (mode1 != VOIDmode)
            to_rtx = adjust_address (to_rtx, mode1, 0);
          else if (GET_MODE (to_rtx) == VOIDmode)
            to_rtx = adjust_address (to_rtx, BLKmode, 0);
        }

      if (offset != 0)
        {
          enum machine_mode address_mode;
          rtx offset_rtx;

          if (!MEM_P (to_rtx))
            {
              /* We can get constant negative offsets into arrays with broken
                 user code.  Translate this to a trap instead of ICEing.  */
              gcc_assert (TREE_CODE (offset) == INTEGER_CST);
              expand_builtin_trap ();
              to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
            }

          offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
          address_mode = get_address_mode (to_rtx);
          if (GET_MODE (offset_rtx) != address_mode)
            offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

          /* If we have an expression in OFFSET_RTX and a non-zero
             byte offset in BITPOS, adding the byte offset before the
             OFFSET_RTX results in better intermediate code, which makes
             later rtl optimization passes perform better.

             We prefer intermediate code like this:

             r124:DI=r123:DI+0x18
             [r124:DI]=r121:DI

             ... instead of ...

             r124:DI=r123:DI+0x10
             [r124:DI+0x8]=r121:DI

             This is only done for aligned data values, as these can
             be expected to result in single move instructions.  */
          if (mode1 != VOIDmode
              && bitpos != 0
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitregion_start = 0;
              if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
                bitregion_end -= bitpos;
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_target (to,
                                                                   offset));
        }

      /* No action is needed if the target is not a memory and the field
         lies completely outside that target.  This can occur if the source
         code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
          && GET_MODE (to_rtx) != BLKmode
          && (unsigned HOST_WIDE_INT) bitpos
             >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
        {
          expand_normal (from);
          result = NULL;
        }
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
        {
          unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
          if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
              && bitpos == 0
              && bitsize == mode_bitsize)
            result = store_expr (from, to_rtx, false, nontemporal);
          else if (bitsize == mode_bitsize / 2
                   && (bitpos == 0 || bitpos == mode_bitsize / 2))
            result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
                                 nontemporal);
          else if (bitpos + bitsize <= mode_bitsize / 2)
            result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
                                  bitregion_start, bitregion_end,
                                  mode1, from,
                                  get_alias_set (to), nontemporal);
          else if (bitpos >= mode_bitsize / 2)
            result = store_field (XEXP (to_rtx, 1), bitsize,
                                  bitpos - mode_bitsize / 2,
                                  bitregion_start, bitregion_end,
                                  mode1, from,
                                  get_alias_set (to), nontemporal);
          else if (bitpos == 0 && bitsize == mode_bitsize)
            {
              rtx from_rtx;
              result = expand_normal (from);
              from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
                                              TYPE_MODE (TREE_TYPE (from)), 0);
              emit_move_insn (XEXP (to_rtx, 0),
                              read_complex_part (from_rtx, false));
              emit_move_insn (XEXP (to_rtx, 1),
                              read_complex_part (from_rtx, true));
            }
          else
            {
              rtx temp = assign_stack_temp (GET_MODE (to_rtx),
                                            GET_MODE_SIZE (GET_MODE (to_rtx)));
              write_complex_part (temp, XEXP (to_rtx, 0), false);
              write_complex_part (temp, XEXP (to_rtx, 1), true);
              result = store_field (temp, bitsize, bitpos,
                                    bitregion_start, bitregion_end,
                                    mode1, from,
                                    get_alias_set (to), nontemporal);
              emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
              emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
            }
        }
      else
        {
          if (MEM_P (to_rtx))
            {
              /* If the field is at offset zero, we could have been given the
                 DECL_RTX of the parent struct.  Don't munge it.  */
              to_rtx = shallow_copy_rtx (to_rtx);
              set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
              if (volatilep)
                MEM_VOLATILE_P (to_rtx) = 1;
            }

          if (optimize_bitfield_assignment_op (bitsize, bitpos,
                                               bitregion_start, bitregion_end,
                                               mode1, to_rtx, to, from))
            result = NULL;
          else
            result = store_field (to_rtx, bitsize, bitpos,
                                  bitregion_start, bitregion_end,
                                  mode1, from,
                                  get_alias_set (to), nontemporal);
        }

      if (result)
        preserve_temp_slots (result);
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((TREE_CODE (to) == VAR_DECL
              || TREE_CODE (to) == PARM_DECL
              || TREE_CODE (to) == RESULT_DECL)
             && REG_P (DECL_RTL (to)))
            || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        {
          if (GET_CODE (value) == PARALLEL)
            emit_group_move (to_rtx, value);
          else
            emit_group_load (to_rtx, value, TREE_TYPE (from),
                             int_size_in_bytes (TREE_TYPE (from)));
        }
      else if (GET_CODE (value) == PARALLEL)
        emit_group_store (to_rtx, value, TREE_TYPE (from),
                          int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        {
          /* Handle calls that return BLKmode values in registers.  */
          if (REG_P (value))
            copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
          else
            emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
        }
      else
        {
          if (POINTER_TYPE_P (TREE_TYPE (to)))
            value = convert_memory_address_addr_space
                      (GET_MODE (to_rtx), value,
                       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();

      /* If the source is itself a return value, it still is in a pseudo at
         this point so we can move it back to the return register directly.  */
      if (REG_P (to_rtx)
          && TYPE_MODE (TREE_TYPE (from)) == BLKmode
          && TREE_CODE (from) != CALL_EXPR)
        temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
      else
        temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        {
          if (GET_CODE (temp) == PARALLEL)
            emit_group_move (to_rtx, temp);
          else
            emit_group_load (to_rtx, temp, TREE_TYPE (from),
                             int_size_in_bytes (TREE_TYPE (from)));
        }
      else if (temp)
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
           (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TYPE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0, nontemporal);
  preserve_temp_slots (result);
  pop_temp_slots ();
  return;
}
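/* Note on the misaligned-store path near the top of expand_assignment
   (illustrative): storing an int through a pointer with 1-byte known
   alignment on a strict-alignment target either uses the backend's
   movmisalign<mode> pattern when one is provided, or falls back to
   store_bit_field over the full GET_MODE_BITSIZE of the mode.  */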
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

static bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  enum machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
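/* On targets whose storent<mode> pattern exists (e.g. movnti/movntps
   on x86), the resulting store bypasses the cache hierarchy; callers
   are expected to fall back to an ordinary store when this function
   returns false.  */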
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other.  Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
                         nontemporal);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
                  nontemporal);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
                  nontemporal);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
         in some optimizations.  Do the conversion in two steps: first
         change the signedness, if needed, then the extend.  But don't
         do this if the type of EXP is a subtype of something else
         since then the conversion might involve more than just
         converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0
          && GET_MODE_PRECISION (GET_MODE (target))
             == TYPE_PRECISION (TREE_TYPE (exp)))
        {
          if (!SUBREG_CHECK_PROMOTED_SIGN (target,
                                           TYPE_UNSIGNED (TREE_TYPE (exp))))
            {
              /* Some types, e.g. Fortran's logical*4, won't have a signed
                 version, so use the mode instead.  */
              tree ntype
                = (signed_or_unsigned_type_for
                   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
              if (ntype == NULL)
                ntype = lang_hooks.types.type_for_mode
                  (TYPE_MODE (TREE_TYPE (exp)),
                   SUBREG_PROMOTED_SIGN (target));

              exp = fold_convert_loc (loc, ntype, exp);
            }

          exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
                                  (GET_MODE (SUBREG_REG (target)),
                                   SUBREG_PROMOTED_SIGN (target)),
                                  exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_SIGN (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_SIGN (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_SIGN (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
            || (TREE_CODE (exp) == MEM_REF
                && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == STRING_CST
                && integer_zerop (TREE_OPERAND (exp, 1))))
           && !nontemporal && !call_param_p
           && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
                 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
        goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
        goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
        goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
          && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
        {
          str_copy_len += STORE_MAX_PIECES - 1;
          str_copy_len &= ~(STORE_MAX_PIECES - 1);
        }
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
                                CONST_CAST (char *, TREE_STRING_POINTER (str)),
                                MEM_ALIGN (target), false))
        goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
                                  str_copy_len, builtin_strncpy_read_str,
                                  CONST_CAST (char *,
                                              TREE_STRING_POINTER (str)),
                                  MEM_ALIGN (target), false,
                                  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
        clear_storage (adjust_address (dest_mem, BLKmode, 0),
                       GEN_INT (exp_len - str_copy_len),
                       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal store, force the value to
         register first.  */
      tmp_target = nontemporal ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
                               &alt_rtl, false);
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
     only a word-sized value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
         but TARGET is not valid memory reference, TEMP will differ
         from TARGET although it is really the same location.  */
      && !(alt_rtl
           && rtx_equal_p (alt_rtl, target)
           && !side_effects_p (alt_rtl)
           && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
         expr_size unless necessary, because some front-ends (C++)
         expr_size-hook must not be given objects that are not
         supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
        {
          if (GET_MODE (target) == BLKmode)
            {
              /* Handle calls that return BLKmode values in registers.  */
              if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
                copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
              else
                store_bit_field (target,
                                 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
                                 0, 0, 0, GET_MODE (temp), temp);
            }
          else
            convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (CONST_INT_P (size)
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (call_param_p
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              enum machine_mode pointer_mode
                = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
              enum machine_mode address_mode = get_address_mode (target);

              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop_loc (loc, MIN_EXPR,
                                  make_tree (sizetype, size),
                                  size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
              rtx label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
                                               TYPE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (call_param_p
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in pointer_mode.  */
              if (CONST_INT_P (copy_size_rtx))
                {
                  size = plus_constant (address_mode, size,
                                        -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  if (GET_MODE (copy_size_rtx) != address_mode)
                    copy_size_rtx = convert_to_mode (address_mode,
                                                     copy_size_rtx,
                                                     TYPE_UNSIGNED (sizetype));

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size, BLOCK_OP_NORMAL);

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        {
          if (GET_CODE (temp) == PARALLEL)
            emit_group_move (target, temp);
          else
            emit_group_load (target, temp, TREE_TYPE (exp),
                             int_size_in_bytes (TREE_TYPE (exp)));
        }
      else if (GET_CODE (temp) == PARALLEL)
        emit_group_store (target, temp, TREE_TYPE (exp),
                          int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (call_param_p
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      /* If we emit a nontemporal store, there is nothing else to do.  */
      else if (nontemporal && emit_storent_insn (target, temp))
        ;
      else
        {
          temp = force_operand (temp, target);
          if (temp != target)
            emit_move_insn (target, temp);
        }
    }

  return NULL_RTX;
}
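/* Worked example for the STRING_CST fast path above (illustrative):
   for "char buf[16] = \"hi\";", exp_len is 16 and str_copy_len starts
   at 3 (the characters plus the terminating NUL), possibly rounded up
   to a STORE_MAX_PIECES boundary; store_by_pieces emits the byte
   stores and clear_storage zeroes whatever tail of the array remains.  */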
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
          && TREE_CODE (tf) == ARRAY_TYPE
          && TYPE_DOMAIN (tf)
          && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
          && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
          && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
          && int_size_in_bytes (type) >= 0);
}
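/* For example, in "struct s { int n; char data[]; }" the field "data"
   is a flexible array member: it is the last field, its domain has a
   zero lower bound and no upper bound, and the enclosing struct still
   has a computable size.  */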
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
        tree nelts;

        nelts = array_type_nelts (type);
        if (nelts && tree_fits_uhwi_p (nelts))
          {
            unsigned HOST_WIDE_INT n;

            n = tree_to_uhwi (nelts) + 1;
            if (n == 0 || for_ctor_p)
              return n;
            else
              return n * count_type_elements (TREE_TYPE (type), false);
          }
        return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
        unsigned HOST_WIDE_INT n;
        tree f;

        n = 0;
        for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              if (!for_ctor_p)
                n += count_type_elements (TREE_TYPE (f), false);
              else if (!flexible_array_member_p (f, type))
                /* Don't count flexible arrays, which are not supposed
                   to be initialized.  */
                n += 1;
            }

        return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f;
        HOST_WIDE_INT n, m;

        gcc_assert (!for_ctor_p);
        /* Estimate the number of scalars in each field and pick the
           maximum.  Other estimates would do instead; the idea is simply
           to make sure that the estimate is not sensitive to the ordering
           of the fields.  */
        n = 1;
        for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              m = count_type_elements (TREE_TYPE (f), false);
              /* If the field doesn't span the whole union, add an extra
                 scalar for the rest.  */
              if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
                                    TYPE_SIZE (type)) != 1)
                m++;
              if (n < m)
                n = m;
            }
        return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case ERROR_MARK:
      return 0;

    case VOID_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case LANG_TYPE:
    default:
      gcc_unreachable ();
    }
}
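/* Worked example (illustrative): for "struct { int a; int b[3]; }",
   count_type_elements returns 2 with FOR_CTOR_P (two top-level fields
   a constructor must initialize) and 4 without it (an estimate of the
   total number of scalars: a plus the three elements of b).  */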
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
                            HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lo_index = TREE_OPERAND (purpose, 0);
          tree hi_index = TREE_OPERAND (purpose, 1);

          if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
            mult = (tree_to_uhwi (hi_index)
                    - tree_to_uhwi (lo_index) + 1);
        }
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
        {
        case CONSTRUCTOR:
          {
            HOST_WIDE_INT nz = 0, ic = 0;

            bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
                                                           p_complete);

            nz_elts += mult * nz;
            init_elts += mult * ic;

            if (const_from_elts_p && const_p)
              const_p = const_elt_p;
          }
          break;

        case INTEGER_CST:
        case REAL_CST:
        case FIXED_CST:
          if (!initializer_zerop (value))
            nz_elts += mult;
          init_elts += mult;
          break;

        case STRING_CST:
          nz_elts += mult * TREE_STRING_LENGTH (value);
          init_elts += mult * TREE_STRING_LENGTH (value);
          break;

        case COMPLEX_CST:
          if (!initializer_zerop (TREE_REALPART (value)))
            nz_elts += mult;
          if (!initializer_zerop (TREE_IMAGPART (value)))
            nz_elts += mult;
          init_elts += mult;
          break;

        case VECTOR_CST:
          {
            unsigned i;
            for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
              {
                tree v = VECTOR_CST_ELT (value, i);
                if (!initializer_zerop (v))
                  nz_elts += mult;
                init_elts += mult;
              }
          }
          break;

        default:
          {
            HOST_WIDE_INT tc = count_type_elements (elt_type, false);
            nz_elts += mult * tc;
            init_elts += mult * tc;

            if (const_from_elts_p && const_p)
              const_p = initializer_constant_valid_p (value, elt_type)
                        != NULL_TREE;
          }
          break;
        }
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
                                                num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
                          HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
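/* Worked example (illustrative, assuming the front end records all
   four elements): for "int v[4] = {0, 5, 0, 0};" this returns
   *p_init_elts == 4, *p_nz_elts == 1 and *p_complete == true; if
   trailing zeros were omitted from the CONSTRUCTOR, *p_complete would
   be false instead.  */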
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
                          const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
        return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
         largest element.  Which would avoid comparing the size of the
         initialized element against any tail padding in the union.
         Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}

/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
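/* Worked example (illustrative): a complete 8-element constructor with
   a single nonzero entry has nz_elts == 1 and init_elts == 8, so
   mostly_zeros_p returns true (1 < 8/4) while all_zeros_p returns
   false.  */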
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
                         tree exp, int cleared, alias_set_type alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
         bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
}
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
5866 /* Store the value of constructor EXP into the rtx TARGET.
5867 TARGET is either a REG or a MEM; we know it cannot conflict, since
5868 safe_from_p has been called.
5869 CLEARED is true if TARGET is known to have been zero'd.
5870 SIZE is the number of bytes of TARGET we are allowed to modify: this
5871 may not be the same as the size of EXP if we are assigning to a field
5872 which has been packed to exclude padding bits. */
5875 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
5877 tree type
= TREE_TYPE (exp
);
5878 #ifdef WORD_REGISTER_OPERATIONS
5879 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
5882 switch (TREE_CODE (type
))
5886 case QUAL_UNION_TYPE
:
5888 unsigned HOST_WIDE_INT idx
;
5891 /* If size is zero or the target is already cleared, do nothing. */
5892 if (size
== 0 || cleared
)
5894 /* We either clear the aggregate or indicate the value is dead. */
5895 else if ((TREE_CODE (type
) == UNION_TYPE
5896 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
5897 && ! CONSTRUCTOR_ELTS (exp
))
5898 /* If the constructor is empty, clear the union. */
5900 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
5904 /* If we are building a static constructor into a register,
5905 set the initial value as zero so we can fold the value into
5906 a constant. But if more than one register is involved,
5907 this probably loses. */
5908 else if (REG_P (target
) && TREE_STATIC (exp
)
5909 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
5911 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5915 /* If the constructor has fewer fields than the structure or
5916 if we are initializing the structure to mostly zeros, clear
5917 the whole structure first. Don't do this if TARGET is a
5918 register whose mode size isn't equal to SIZE since
5919 clear_storage can't handle this case. */
5921 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp
))
5922 != fields_length (type
))
5923 || mostly_zeros_p (exp
))
5925 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
5928 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
5932 if (REG_P (target
) && !cleared
)
5933 emit_clobber (target
);
5935 /* Store each element of the constructor into the
5936 corresponding field of TARGET. */
5937 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, field
, value
)
5939 enum machine_mode mode
;
5940 HOST_WIDE_INT bitsize
;
5941 HOST_WIDE_INT bitpos
= 0;
5943 rtx to_rtx
= target
;
5945 /* Just ignore missing fields. We cleared the whole
5946 structure, above, if any fields are missing. */
5950 if (cleared
&& initializer_zerop (value
))
5953 if (tree_fits_uhwi_p (DECL_SIZE (field
)))
5954 bitsize
= tree_to_uhwi (DECL_SIZE (field
));
5958 mode
= DECL_MODE (field
);
5959 if (DECL_BIT_FIELD (field
))
5962 offset
= DECL_FIELD_OFFSET (field
);
5963 if (tree_fits_shwi_p (offset
)
5964 && tree_fits_shwi_p (bit_position (field
)))
5966 bitpos
= int_bit_position (field
);
5970 bitpos
= tree_to_shwi (DECL_FIELD_BIT_OFFSET (field
));
5974 enum machine_mode address_mode
;
5978 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
5979 make_tree (TREE_TYPE (exp
),
5982 offset_rtx
= expand_normal (offset
);
5983 gcc_assert (MEM_P (to_rtx
));
5985 address_mode
= get_address_mode (to_rtx
);
5986 if (GET_MODE (offset_rtx
) != address_mode
)
5987 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
5989 to_rtx
= offset_address (to_rtx
, offset_rtx
,
5990 highest_pow2_factor (offset
));
5993 #ifdef WORD_REGISTER_OPERATIONS
5994 /* If this initializes a field that is smaller than a
5995 word, at the start of a word, try to widen it to a full
5996 word. This special case allows us to output C++ member
5997 function initializations in a form that the optimizers
6000 && bitsize
< BITS_PER_WORD
6001 && bitpos
% BITS_PER_WORD
== 0
6002 && GET_MODE_CLASS (mode
) == MODE_INT
6003 && TREE_CODE (value
) == INTEGER_CST
6005 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
6007 tree type
= TREE_TYPE (value
);
6009 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
6011 type
= lang_hooks
.types
.type_for_mode
6012 (word_mode
, TYPE_UNSIGNED (type
));
6013 value
= fold_convert (type
, value
);
6016 if (BYTES_BIG_ENDIAN
)
6018 = fold_build2 (LSHIFT_EXPR
, type
, value
,
6019 build_int_cst (type
,
6020 BITS_PER_WORD
- bitsize
));
6021 bitsize
= BITS_PER_WORD
;
6026 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
6027 && DECL_NONADDRESSABLE_P (field
))
6029 to_rtx
= copy_rtx (to_rtx
);
6030 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
6033 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
6035 get_alias_set (TREE_TYPE (field
)));
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! tree_fits_uhwi_p (lo_index)
			|| ! tree_fits_uhwi_p (hi_index))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_to_uhwi (hi_index)
				       - tree_to_uhwi (lo_index) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			 ? tree_to_uhwi (TYPE_SIZE (elttype))
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && tree_fits_shwi_p (lo_index)
		    && tree_fits_shwi_p (hi_index)
		    && (lo = tree_to_shwi (lo_index),
			hi = tree_to_shwi (hi_index),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx loop_start = gen_label_rtx ();
		    rtx loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0, false);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end, -1);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! tree_fits_shwi_p (index))
		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_to_shwi (index) - minelt)
			    * tree_to_uhwi (TYPE_SIZE (elttype)));
		else
		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }
    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = CODE_FOR_nothing;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;
	alias_set_type alias;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) optab_handler (vec_init_optab, mode);
	    /* Don't use vec_init<mode> if some elements have VECTOR_TYPE.  */
	    if (icode != CODE_FOR_nothing)
	      {
		tree value;

		FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
		  if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
		    {
		      icode = CODE_FOR_nothing;
		      break;
		    }
	      }
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_to_uhwi
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype)));

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_to_uhwi (ce->index);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* vec_init<mode> should not be used if there are VECTOR_TYPE
		   elements.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, value_mode,
					 value, cleared, alias);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */
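
/* Illustrative sketch (not part of this file): how a caller might use
   the interface documented above to store a 9-bit bit-field value at
   bit offset 3 of a structure in memory.  `mem' and `rhs' are
   hypothetical stand-ins for the containing object's rtx and the
   right-hand-side tree; passing VOIDmode requests a bit-field store.  */
#if 0
  rtx mem = ...;   /* MEM for the containing structure.  */
  tree rhs = ...;  /* Tree for the value being stored.  */
  store_field (mem, /*bitsize=*/9, /*bitpos=*/3,
	       /*bitregion_start=*/0, /*bitregion_end=*/0,
	       /*mode=*/VOIDmode, rhs,
	       get_alias_set (TREE_TYPE (rhs)), /*nontemporal=*/false);
#endif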
static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     unsigned HOST_WIDE_INT bitregion_start,
	     unsigned HOST_WIDE_INT bitregion_end,
	     enum machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */
      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      || (bitsize >= 0 && mode != BLKmode
	  && GET_MODE_BITSIZE (mode) > bitsize)
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (bitsize >= 0
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TEMP and TARGET are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  Likewise
	 for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && (bitpos % BITS_PER_UNIT) == 0
		  && (bitsize % BITS_PER_UNIT) == 0)))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && (bitpos % BITS_PER_UNIT) == 0);

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  rtx temp_target;
	  if (mode == BLKmode || mode == VOIDmode)
	    mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	  temp_target = gen_reg_rtx (mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}
      else if (mode == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	    {
	      rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	      copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	      temp = temp_target;
	    }
	  else
	    {
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	      rtx temp_target;
	      mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	      temp_target = gen_reg_rtx (mode);
	      temp_target
		= extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
				     temp_target, mode, mode);
	      temp = temp_target;
	    }
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0, nontemporal);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */
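
/* Illustrative sketch (hypothetical caller, not from this file):
   decomposing a reference such as a.b[i].c into its ultimate base
   object plus a bit position and an optional variable byte offset,
   using the out-parameters documented above.  */
#if 0
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;
  tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep,
				   /*keep_aligning=*/false);
  /* BASE is now the containing object; the access covers BITSIZE bits
     starting BITPOS bits (plus OFFSET bytes, when non-null) into it.  */
#endif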
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  offset_int bit_offset = 0;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   field's size.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_to_uhwi (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += *pbitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  offset_int boff, coff = mem_ref_offset (exp);
		  boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (TREE_CODE (offset) == INTEGER_CST)
    {
      offset_int tem = wi::sext (wi::to_offset (offset),
				 TYPE_PRECISION (sizetype));
      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
      tem += bit_offset;
      if (wi::fits_shwi_p (tem))
	{
	  *pbitpos = tem.to_shwi ();
	  *poffset = offset = NULL_TREE;
	}
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (wi::neg_p (bit_offset))
	{
	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
	  offset_int tem = bit_offset.and_not (mask);
	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
	  bit_offset -= tem;
	  tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
	  offset = size_binop (PLUS_EXPR, offset,
			       wide_int_to_tree (sizetype, tem));
	}

      *pbitpos = bit_offset.to_shwi ();
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}

/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Returns true if REF is an array reference to an array at the end of
   a structure.  If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  if (TREE_CODE (ref) != ARRAY_REF
      && TREE_CODE (ref) != ARRAY_RANGE_REF)
    return false;

  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
	 non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	{
	  tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
	    nextf = DECL_CHAIN (nextf);
	  if (nextf)
	    return false;
	}

      ref = TREE_OPERAND (ref, 0);
    }

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  */
  if (DECL_P (ref))
    return false;

  return true;
}
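
/* For illustration (user-level C, not part of this file): the kind of
   source object the predicate above is about.  A trailing array member
   is commonly over-allocated, so an access beyond its declared bound
   cannot be rejected from the domain alone:

     struct str { int len; char data[1]; };    // array at end of struct
     struct str *s = malloc (sizeof (struct str) + 100);
     s->data[42] = 'x';   // index beyond the declared bound of data[1]
*/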
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}

/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
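
/* Illustrative sketch (hypothetical caller, not from this file):
   forcing a computed address such as (plus (reg) (const_int 16)) into
   an operand.  `base_reg' is an assumed pseudo-register rtx.  */
#if 0
  rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (16));
  rtx op = force_operand (addr, NULL_RTX);
  /* OP is now a REG (or MEM/constant) holding the sum, with any add
     instructions needed to compute it already emitted.  */
#endif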
rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;

      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
7409 This is used in updating alignment of MEMs in array references. */
7411 unsigned HOST_WIDE_INT
7412 highest_pow2_factor (const_tree exp
)
7414 unsigned HOST_WIDE_INT ret
;
7415 int trailing_zeros
= tree_ctz (exp
);
7416 if (trailing_zeros
>= HOST_BITS_PER_WIDE_INT
)
7417 return BIGGEST_ALIGNMENT
;
7418 ret
= (unsigned HOST_WIDE_INT
) 1 << trailing_zeros
;
7419 if (ret
> BIGGEST_ALIGNMENT
)
7420 return BIGGEST_ALIGNMENT
;
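
/* The computation above is just "2 raised to the number of known
   trailing zero bits, capped".  A minimal standalone sketch of the same
   idea, using __builtin_ctzll on a plain integer instead of tree_ctz on
   a tree (the name factor_of is hypothetical):

     unsigned long long
     factor_of (unsigned long long v)
     {
       if (v == 0)
	 return 1ULL << 63;	// all bits known zero; the cap applies
       return 1ULL << __builtin_ctzll (v);
     }

   e.g. factor_of (24) == 8, factor_of (40) == 8, factor_of (7) == 1.  */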
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
#ifdef HAVE_conditional_move
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
  return code;
}
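
/* For example, under this mapping a GIMPLE "a < b" on an unsigned type
   becomes an rtl LTU comparison, while the same tree code on a signed
   type becomes LT:

     convert_tree_comp_to_rtx (LT_EXPR, 1) == LTU
     convert_tree_comp_to_rtx (LT_EXPR, 0) == LT

   The unordered variants (UNLT and friends) have no signedness split,
   since they apply only to floating-point comparisons.  */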
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
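
/* Illustrative sketch (hypothetical caller, not from this file): the
   usual pattern of expanding both operands of a binary operation before
   emitting the insn; `treeop0'/`treeop1', `subtarget', `mode' and
   `unsignedp' are assumed to come from the surrounding expander.  */
#if 0
  rtx op0, op1;
  expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
  temp = expand_simple_binop (mode, PLUS, op0, op1, target,
			      unsignedp, OPTAB_LIB_WIDEN);
#endif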
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
	 rtl_for_decl_init is called on DECL_INITIAL with
	 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified.  */
      if (modifier == EXPAND_INITIALIZER
	  && COMPOUND_LITERAL_EXPR_DECL (exp))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */

	  if (TREE_ADDRESSABLE (exp)
	      && ! MEM_P (result)
	      && ! targetm.calls.allocate_stack_slots_for_args ())
	    {
	      error ("local frame unavailable (naked function?)");
	      return result;
	    }
	  else
	    gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp))
	    TREE_USED (exp) = 1;

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
	 than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
	tmp = convert_modes (tmode, GET_MODE (tmp),
			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  enum machine_mode address_mode = Pmode;
  enum machine_mode pointer_mode = ptr_mode;
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}
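
/* The "Weird Things" case above corresponds to user source along the
   lines of (illustrative only):

     int a;
     short s = (short) (long) &a;   // address squeezed into a narrow mode

   Here TMODE would be a narrow integer mode that is neither the address
   mode nor the pointer mode, so the address is computed in address_mode
   and the outer truncation is left to the caller.  */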
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  enum machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
	       && (! MOVE_BY_PIECES_P
		   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp));
  return target;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   Intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
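
/* Illustrative sketch (hypothetical caller): the common "just give me
   the value" pattern described above.  */
#if 0
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  /* VAL may be a REG, MEM or constant; it need not be TARGET even when
     a TARGET was supplied, so callers must use the return value.  */
#endif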
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl,
		  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
			    inner_reference_p);
  return ret;
}
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
{
#ifdef HAVE_conditional_move
  rtx insn;
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  enum machine_mode comparison_mode;
  gimple srcstmt;
  rtx temp;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = TYPE_MODE (type);
  enum machine_mode orig_mode = mode;

  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
	return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  start_sequence ();
  expand_operands (treeop1, treeop2,
		   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }

  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
				op00, op01, comparison_mode,
				op1, op2, mode,
				unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
#endif
  return NULL_RTX;
}
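
/* For illustration: this is the expansion path that turns source like

     x = a < b ? c : d;

   into a compare followed by a conditional-move instruction (e.g. cmov
   on x86) rather than a branch, whenever the target's conditional-move
   pattern accepts the mode.  */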
rtx
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
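/* Illustrative note (not from the original source): reduce_bit_field matters
   for types whose precision is narrower than their mode, e.g.

       struct s { unsigned v : 3; };      (TYPE_PRECISION 3, TYPE_MODE SImode)

   Arithmetic on s.v is carried out in the full SImode, and REDUCE_BIT_FIELD
   masks the result back down to 3 bits so that v + 1 wraps at 8 as the
   language requires.  */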
  switch (code)
    {
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (treeop0);

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (treeop0, target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, type, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (treeop0,
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM,
			false);
	  else
	    {
	      gcc_assert (REG_P (target));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE (treeop0))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
	{
	  op0 = expand_expr (treeop0, target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (treeop0, NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (treeop0);
	  enum machine_mode inner_mode = GET_MODE (op0);

	  if (inner_mode == VOIDmode)
	    inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
				       subreg_lowpart_offset (mode,
							      inner_mode));
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);
	addr_space_t as_to;
	addr_space_t as_from;

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  {
	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	    gcc_assert (op0);
	    return op0;
	  }

	/* For disjoint address spaces, converting anything but
	   a null pointer invokes undefined behaviour.  We simply
	   always return a null pointer here.  */
	return CONST0_RTX (mode);
      }
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
	 expand is able to handle this correctly and get the correct result out
	 of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
	 offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type, treeop1);

    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
	  && TREE_CODE (treeop1) == VAR_DECL
	  && (DECL_RTL (treeop1) == frame_pointer_rtx
	      || DECL_RTL (treeop1) == stack_pointer_rtx
	      || DECL_RTL (treeop1) == arg_pointer_rtx))
	{
	  gcc_unreachable ();
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (treeop0) == INTEGER_CST
	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (treeop1))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));

	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop0);
	      constant_part =
		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (treeop1) == INTEGER_CST
		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (treeop0))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));

	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (treeop1, NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop1);
	      constant_part
		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* Use TER to expand pointer addition of a negated value
	 as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
	  && TREE_CODE (treeop1) == SSA_NAME
	  && TYPE_MODE (TREE_TYPE (treeop0))
	     == TYPE_MODE (TREE_TYPE (treeop1)))
	{
	  gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
	  if (def)
	    {
	      treeop1 = gimple_assign_rhs1 (def);
	      code = MINUS_EXPR;
	      goto do_minus;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, EXPAND_NORMAL);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
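/* Illustrative note (not from the original source): the EXPAND_SUM path
   above is what lets an address computation such as

       p = &arr[10];

   fold into a single (plus (symbol_ref arr) (const_int 40)) via
   plus_constant, rather than materializing the symbol and the offset in
   separate instructions.  */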
    case MINUS_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (treeop0)
	  && really_constant_p (treeop1))
	{
	  expand_operands (treeop0, treeop1,
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (CONST_INT_P (op1))
	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
						    -INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
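/* Illustrative note (not from the original source): the rewrite above means
   that, e.g., "x - 4" is expanded exactly like "x + (-4)", so a single
   add-immediate pattern covers both cases and negative immediates stay
   canonical in the RTL stream.  */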
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
					  target, unsignedp);
      return target;

    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	{
	  tree t1 = treeop0;
	  treeop0 = treeop1;
	  treeop1 = t1;
	}

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
	{
	  enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
	  this_optab = usmul_widen_optab;
	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
	      != CODE_FOR_nothing)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				 EXPAND_NORMAL);
	      else
		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
				 EXPAND_NORMAL);
	      /* op0 and op1 might still be constant, despite the above
		 != INTEGER_CST check.  Handle it.  */
	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		{
		  op0 = convert_modes (innermode, mode, op0, true);
		  op1 = convert_modes (innermode, mode, op1, false);
		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							target, unsignedp));
		}
	      goto binop3;
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
	{
	  tree op0type = TREE_TYPE (treeop0);
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (TREE_CODE (treeop0) != INTEGER_CST)
	    {
	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
		  != CODE_FOR_nothing)
		{
		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    {
		     widen_mult_const:
		      op0 = convert_modes (innermode, mode, op0, zextend_p);
		      op1
			= convert_modes (innermode, mode, op1,
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							    target,
							    unsignedp));
		    }
		  temp = expand_widening_mult (mode, op0, op1, target,
					       unsignedp, this_optab);
		  return REDUCE_BIT_FIELD (temp);
		}
	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
		  != CODE_FOR_nothing
		  && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (treeop0);
		  if (TREE_CODE (treeop1) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (treeop1),
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		  else
		    op1 = expand_normal (treeop1);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    goto widen_mult_const;
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (hipart != htem)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
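/* Illustrative note (not from the original source): WIDEN_MULT_EXPR covers
   source like

       long long f (int a, int b) { return (long long) a * b; }

   where find_widening_optab_handler looks for a mulsidi3-style pattern.
   The word_mode fallback above instead computes the full product with a
   plain multiply and then fixes up the high part with
   expand_mult_highpart_adjust.  */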
    case FMA_EXPR:
      {
	optab opt = fma_optab;
	gimple def0, def2;

	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
	   call.  */
	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
	  {
	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
	    tree call_expr;

	    gcc_assert (fn != NULL_TREE);
	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
	    return expand_builtin (call_expr, target, subtarget, mode, false);
	  }

	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);

	op0 = op2 = NULL;

	if (def0 && def2
	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnms_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }
	else if (def0
		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnma_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	  }
	else if (def2
		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fms_optab;
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }

	if (op0 == NULL)
	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
	if (op2 == NULL)
	  op2 = expand_normal (treeop2);
	op1 = expand_normal (treeop1);

	return expand_ternary_op (TYPE_MODE (type), opt,
				  op0, op1, op2, target, 0);
      }
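/* Illustrative note (not from the original source): the NEGATE_EXPR
   look-asides above let an expression whose first or third operand is a
   negation, e.g. fma (-a, b, c), map directly onto a target fnma, fms or
   fnms pattern instead of emitting an explicit negation followed by a
   plain fma.  */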
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	{
	  tree t1 = treeop0;
	  treeop0 = treeop1;
	  treeop1 = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && tree_fits_shwi_p (treeop1))
	{
	  tree exp1 = treeop1;

	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_to_shwi (exp1),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;

    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, treeop0, 1));
    case MIN_EXPR:
    case MAX_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
		       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
				 -1);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* In case we have to reduce the result to bitfield precision
	 for unsigned bitfield expand this as XOR with a proper constant
	 instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
	{
	  wide_int mask = wi::mask (TYPE_PRECISION (type),
				    false, GET_MODE_PRECISION (mode));

	  temp = expand_binop (mode, xor_optab, op0,
			       immed_wide_int_const (mask, mode),
			       target, 1, OPTAB_LIB_WIDEN);
	}
      else
	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
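/* Illustrative note (not from the original source): for an unsigned
   bit-field of precision 3 held in SImode, the mask built above is 7, and
   ~x is emitted as x ^ 7; a plain one's-complement would set the upper 29
   bits, which REDUCE_BIT_FIELD would then have to clear again.  */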
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
		  || (GET_MODE_PRECISION (TYPE_MODE (type))
		      == TYPE_PRECISION (type)));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_shift" doesn't support sat/no-sat fixed-point
	 shifts.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (! safe_from_p (subtarget, treeop1, 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      temp = expand_variable_shift (code, mode, op0, treeop1, target,
				    unsignedp);
      if (code == LSHIFT_EXPR)
	temp = REDUCE_BIT_FIELD (temp);
      return temp;
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (ops,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode);
      if (temp)
	return temp;

      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */

      if ((target == 0
	   || modifier == EXPAND_STACK_PARM
	   || ! safe_from_p (target, treeop0, 1)
	   || ! safe_from_p (target, treeop1, 1)
	   /* Make sure we don't have a hard reg (such as function's return
	      value) live across basic blocks, if not optimizing.  */
	   || (!optimize && REG_P (target)
	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);

      if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
	emit_move_insn (target, constm1_rtx);
      else
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return target;
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (type));
      else
	/* If target overlaps with op1, then either we need to force
	   op1 into a pseudo (if target also overlaps with op0),
	   or write the complex parts in reverse order.  */
	switch (GET_CODE (target))
	  {
	  case CONCAT:
	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
	      {
		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
		  {
		  complex_expr_force_op1:
		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
		    emit_move_insn (temp, op1);
		    op1 = temp;
		    break;
		  }
	      complex_expr_swap_order:
		/* Move the imaginary (op1) and real (op0) parts to their
		   location.  */
		write_complex_part (target, op1, true);
		write_complex_part (target, op0, false);

		return target;
	      }
	    break;
	  case MEM:
	    temp = adjust_address_nv (target,
				      GET_MODE_INNER (GET_MODE (target)), 0);
	    if (reg_overlap_mentioned_p (temp, op1))
	      {
		enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
		temp = adjust_address_nv (target, imode,
					  GET_MODE_SIZE (imode));
		if (reg_overlap_mentioned_p (temp, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  default:
	    if (reg_overlap_mentioned_p (target, op1))
	      {
		if (reg_overlap_mentioned_p (target, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  }

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
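/* Illustrative note (not from the original source): the overlap handling
   above covers cases where the target shares storage with an input, e.g.
   building a complex value out of the old value's own real and imaginary
   parts.  Writing the imaginary part first, or forcing op1 into a fresh
   pseudo, keeps the second write from clobbering an input that still
   needs to be read.  */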
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (treeop0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (ops, target);
	return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	/* The signedness is determined from input operand.  */
	temp = expand_widen_pattern_expr
	  (ops, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
					  target, unsignedp);
      gcc_assert (target);
      return target;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PERM_EXPR:
      expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);

      /* Careful here: if the target doesn't support integral vector modes,
	 a constant selection vector could wind up smooshed into a normal
	 integral constant.  */
      if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
	{
	  tree sel_type = TREE_TYPE (treeop2);
	  enum machine_mode vmode
	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
			       TYPE_VECTOR_SUBPARTS (sel_type));
	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
	}
      else
	gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);

      temp = expand_vec_perm (mode, op0, op1, op2, target);
      gcc_assert (temp);
      return temp;
    case DOT_PROD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case SAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (type));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (treeop1) != void_type_node
		  && TREE_TYPE (treeop2) != void_type_node);

      temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
      if (temp)
	return temp;

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, treeop0, 1)
	  && GET_MODE (original_target) == mode
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (treeop0, op0, -1);
      store_expr (treeop1, temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (treeop2, temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;

    default:
      gcc_unreachable ();
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
		   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands being properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Return TRUE if value in SSA is zero and sign extended for wider mode MODE
   using value range information stored.  Return FALSE otherwise.

   This is used to check if SUBREG is zero and sign extended and to set
   promoted mode SRP_SIGNED_AND_UNSIGNED to SUBREG.  */

static bool
promoted_for_signed_and_unsigned_p (tree ssa, enum machine_mode mode)
{
  wide_int min, max;

  if (ssa == NULL_TREE
      || TREE_CODE (ssa) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (ssa))
      || (TYPE_PRECISION (TREE_TYPE (ssa)) != GET_MODE_PRECISION (mode)))
    return false;

  /* Return FALSE if value_range is not recorded for SSA.  */
  if (get_range_info (ssa, &min, &max) != VR_RANGE)
    return false;

  /* Return true (to set SRP_SIGNED_AND_UNSIGNED to SUBREG) if MSB of the
     smaller mode is not set (i.e. MSB of ssa is not set).  */
  if (!wi::neg_p (min, SIGNED) && !wi::neg_p (max, SIGNED))
    return true;
  else
    return false;
}
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }
  return false;
}
rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
	default:
	case 3: treeop2 = TREE_OPERAND (exp, 2);
	case 2: treeop1 = TREE_OPERAND (exp, 1);
	case 1: treeop0 = TREE_OPERAND (exp, 0);
	case 0: break;
      }
  ops.code = code;
  ops.type = type;
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = loc;

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
		      && INTEGRAL_TYPE_P (type)
		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == BIT_FIELD_REF
	  || code == COMPONENT_REF
	  || code == INDIRECT_REF)
	return expand_expr (treeop0, const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }
    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
	 out-of-ssa.  So fake instructions as if this was an access to the
	 base variable.  This unnecessarily allocates a pseudo, see how we can
	 reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
	{
	  tree var = SSA_NAME_VAR (exp);
	  if (var && DECL_RTL_SET_P (var))
	    return DECL_RTL (var);
	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
			      LAST_VIRTUAL_REGISTER + 1);
	}

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
	  && modifier == EXPAND_INITIALIZER
	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
	  && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
	g = SSA_NAME_DEF_STMT (exp);
      if (g)
	{
	  rtx r;
	  ops.code = gimple_assign_rhs_code (g);
	  switch (get_gimple_rhs_class (ops.code))
	    {
	    case GIMPLE_TERNARY_RHS:
	      ops.op2 = gimple_assign_rhs3 (g);
	      /* Fallthru */
	    case GIMPLE_BINARY_RHS:
	      ops.op1 = gimple_assign_rhs2 (g);
	      /* Fallthru */
	    case GIMPLE_UNARY_RHS:
	      ops.op0 = gimple_assign_rhs1 (g);
	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
	      ops.location = gimple_location (g);
	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
	      break;
	    case GIMPLE_SINGLE_RHS:
	      {
		location_t saved_loc = curr_insn_location ();
		set_curr_insn_location (gimple_location (g));
		r = expand_expr_real (gimple_assign_rhs1 (g), target,
				      tmode, modifier, NULL, inner_reference_p);
		set_curr_insn_location (saved_loc);
		break;
	      }
	    default:
	      gcc_unreachable ();
	    }
	  if (REG_P (r) && !REG_EXPR (r))
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
	  return r;
	}

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);
      decl_rtl = copy_rtx (decl_rtl);
      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
	  && REG_P (decl_rtl)
	  && HARD_REGISTER_P (decl_rtl))
	add_to_hard_reg_set (&crtl->asm_clobbers,
			     GET_MODE (decl_rtl), REGNO (decl_rtl));

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      TREE_USED (exp) = 1;

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (SCOPE_FILE_SCOPE_P (context)
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  || DECL_EXTERNAL (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 ??? We aren't parsing while expanding anymore.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
	 address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && !memory_address_addr_space_p (DECL_MODE (exp),
					       XEXP (decl_rtl, 0),
					       MEM_ADDR_SPACE (decl_rtl)))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl,
	 there are two cases: we are dealing with a BLKmode value
	 that is returned in a register, or we are dealing with
	 a promoted value.  In the latter case, return a SUBREG
	 of the wanted mode, but mark it so that we know that it
	 was already extended.  */
      if (REG_P (decl_rtl)
	  && DECL_MODE (exp) != BLKmode
	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
	{
	  enum machine_mode pmode;

	  /* Get the signedness to be used for this variable.  Ensure we get
	     the same mode we got when the variable was declared.  */
	  if (code == SSA_NAME
	      && (g = SSA_NAME_DEF_STMT (ssa_name))
	      && gimple_code (g) == GIMPLE_CALL
	      && !gimple_call_internal_p (g))
	    pmode = promote_function_mode (type, mode, &unsignedp,
					   gimple_call_fntype (g),
					   2);
	  else
	    pmode = promote_decl_mode (exp, &unsignedp);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  if (promoted_for_signed_and_unsigned_p (ssa_name, mode))
	    SUBREG_PROMOTED_SET (temp, SRP_SIGNED_AND_UNSIGNED);
	  else
	    SUBREG_PROMOTED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;
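/* Illustrative note (not from the original source): a char parameter that
   the ABI promotes to SImode reaches the code above with DECL_MODE QImode
   but DECL_RTL in SImode; gen_lowpart_SUBREG hands back a QImode view
   marked SUBREG_PROMOTED_VAR_P, so later code knows the upper bits of the
   register already hold a valid extension.  */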
    case INTEGER_CST:
      /* Given that TYPE_PRECISION (type) is not always equal to
	 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
	 the former to the latter according to the signedness of the
	 type.  */
      temp = immed_wide_int_const (wide_int::from
				   (exp,
				    GET_MODE_PRECISION (TYPE_MODE (type)),
				    TYPE_SIGN (type)),
				   TYPE_MODE (type));
      return temp;
= NULL_TREE
;
9587 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
9588 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
9589 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
9590 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
9591 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
9592 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
9593 return const_vector_from_tree (exp
);
9594 if (GET_MODE_CLASS (mode
) == MODE_INT
)
9596 tree type_for_mode
= lang_hooks
.types
.type_for_mode (mode
, 1);
9598 tmp
= fold_unary_loc (loc
, VIEW_CONVERT_EXPR
, type_for_mode
, exp
);
9602 vec
<constructor_elt
, va_gc
> *v
;
9604 vec_alloc (v
, VECTOR_CST_NELTS (exp
));
9605 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
9606 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, VECTOR_CST_ELT (exp
, i
));
9607 tmp
= build_constructor (type
, v
);
9609 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
9614 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case FIXED_CST:
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
					    MEM_ADDR_SPACE (temp)))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
    case SAVE_EXPR:
      {
	tree val = treeop0;
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
				      inner_reference_p);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (curr_insn_location (),
			      VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    treeop0 = val;
	    TREE_OPERAND (exp, 0) = treeop0;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

	  return const0_rtx;
	}

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum insn_code icode;
	unsigned int align;

	op0 = addr_for_mem_ref (exp, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = get_object_alignment (exp);
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    struct expand_operand ops[2];

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail,
	       nor can the generator.  */
	    create_output_operand (&ops[0], NULL_RTX, mode);
	    create_fixed_operand (&ops[1], temp);
	    expand_insn (icode, 2, ops);
	    temp = ops[0].value;
	  }
	return temp;
      }
    case MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum machine_mode address_mode;
	tree base = TREE_OPERAND (exp, 0);
	gimple def_stmt;
	enum insn_code icode;
	unsigned align;
	/* Handle expansion of non-aliased memory with non-BLKmode.  That
	   might end up in a register.  */
	if (mem_ref_refers_to_non_mem_p (exp))
	  {
	    HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
	    base = TREE_OPERAND (base, 0);
	    if (offset == 0
		&& tree_fits_uhwi_p (TYPE_SIZE (type))
		&& (GET_MODE_BITSIZE (DECL_MODE (base))
		    == tree_to_uhwi (TYPE_SIZE (type))))
	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
				  target, tmode, modifier);
	    if (TYPE_MODE (type) == BLKmode)
	      {
		temp = assign_stack_temp (DECL_MODE (base),
					  GET_MODE_SIZE (DECL_MODE (base)));
		store_expr (base, temp, 0, false);
		temp = adjust_address (temp, BLKmode, offset);
		set_mem_size (temp, int_size_in_bytes (type));
		return temp;
	      }
	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
			  bitsize_int (offset * BITS_PER_UNIT));
	    return expand_expr (exp, target, tmode, modifier);
	  }
	address_mode = targetm.addr_space.address_mode (as);
	base = TREE_OPERAND (exp, 0);
	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
	  {
	    tree mask = gimple_assign_rhs2 (def_stmt);
	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
			   gimple_assign_rhs1 (def_stmt), mask);
	    TREE_OPERAND (exp, 0) = base;
	  }
	align = get_object_alignment (exp);
	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address_addr_space (mode, op0, as);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  {
	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
	    op0 = memory_address_addr_space (mode, op0, as);
	  }
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	if (TREE_THIS_VOLATILE (exp))
	  MEM_VOLATILE_P (temp) = 1;
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && !inner_reference_p
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode))
	  {
	    if ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing)
	      {
		struct expand_operand ops[2];

		/* We've already validated the memory, and we're creating a
		   new pseudo destination.  The predicates really can't fail,
		   nor can the generator.  */
		create_output_operand (&ops[0], NULL_RTX, mode);
		create_fixed_operand (&ops[1], temp);
		expand_insn (icode, 2, ops);
		temp = ops[0].value;
	      }
	    else if (SLOW_UNALIGNED_ACCESS (mode, align))
	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
					(modifier == EXPAND_STACK_PARM
					 ? NULL_RTX : target),
					mode, mode);
	  }
	return temp;
      }
    case ARRAY_REF:

      {
	tree array = treeop0;
	tree index = treeop1;
	tree init;

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	  {
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
		  break;
		}
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (index) == INTEGER_CST
		 && (TREE_CODE (array) == VAR_DECL
		     || TREE_CODE (array) == CONST_DECL)
		 && (init = ctor_for_folding (array)) != error_mark_node)
	  {
	    if (init == NULL_TREE)
	      {
		tree value = build_zero_cst (type);
		if (TREE_CODE (value) == CONSTRUCTOR)
		  {
		    /* If VALUE is a CONSTRUCTOR, this optimization is only
		       useful if this doesn't store the CONSTRUCTOR into
		       memory.  If it does, it is more efficient to just
		       load the data from the array directly.  */
		    rtx ret = expand_constructor (value, target,
						  modifier, true);
		    if (ret == NULL_RTX)
		      value = NULL_TREE;
		  }

		if (value)
		  return expand_expr (value, target, tmode, modifier);
	      }
	    else if (TREE_CODE (init) == CONSTRUCTOR)
	      {
		unsigned HOST_WIDE_INT ix;
		tree field, value;

		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					  field, value)
		  if (tree_int_cst_equal (field, index))
		    {
		      if (TREE_SIDE_EFFECTS (value))
			break;

		      if (TREE_CODE (value) == CONSTRUCTOR)
			{
			  /* If VALUE is a CONSTRUCTOR, this
			     optimization is only useful if
			     this doesn't store the CONSTRUCTOR
			     into memory.  If it does, it is more
			     efficient to just load the data from
			     the array directly.  */
			  rtx ret = expand_constructor (value, target,
							modifier, true);
			  if (ret == NULL_RTX)
			    break;
			}

		      return
			expand_expr (fold (value), target, tmode, modifier);
		    }
	      }
	    else if (TREE_CODE (init) == STRING_CST)
	      {
		tree low_bound = array_ref_low_bound (exp);
		tree index1 = fold_convert_loc (loc, sizetype, treeop1);

		/* Optimize the special case of a zero lower bound.

		   We convert the lower bound to sizetype to avoid problems
		   with constant folding.  E.g. suppose the lower bound is
		   1 and its mode is QI.  Without the conversion
		      (ARRAY + (INDEX - (unsigned char)1))
		   becomes
		      (ARRAY + (-(unsigned char)1) + INDEX)
		   which becomes
		      (ARRAY + 255 + INDEX).  Oops!  */
		if (!integer_zerop (low_bound))
		  index1 = size_diffop_loc (loc, index1,
					    fold_convert_loc (loc, sizetype,
							      low_bound));

		if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index1)],
					   mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
				    idx, field, value)
	    if (field == treeop1
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
			&& (GET_MODE_PRECISION (DECL_MODE (field))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		  {
		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
		      {
			op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
					    imode);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			int count = GET_MODE_PRECISION (imode) - bitsize;

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
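/* Illustrative note (not from the original source): pulling a bit-field
   straight out of a CONSTRUCTOR, as above, must re-apply the field's width
   by hand.  For a 5-bit unsigned field the value is masked with
   (1 << 5) - 1; for a signed field it is shifted up and back down
   arithmetically, mimicking what storing and reloading the field would
   have done.  */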
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1, mode2;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0, must_force_mem;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep, true);
	rtx orig_op0, memloc;
	bool mem_attrs_from_type = false;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr_real (tem,
			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				   != INTEGER_CST)
			       && modifier != EXPAND_STACK_PARM
			       ? target : NULL_RTX),
			      VOIDmode,
			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
			      NULL, true);

	/* If the field has a mode, we want to access it in the
	   field's mode, not the computed mode.
	   If a MEM has VOIDmode (external with incomplete type),
	   use BLKmode for it instead.  */
	if (MEM_P (op0))
	  {
	    if (mode1 != VOIDmode)
	      op0 = adjust_address (op0, mode1, 0);
	    else if (GET_MODE (op0) == VOIDmode)
	      op0 = adjust_address (op0, BLKmode, 0);
	  }

	mode2
	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);

	/* If we have either an offset, a BLKmode result, or a reference
	   outside the underlying object, we must force it to memory.
	   Such a case can occur in Ada if we have unchecked conversion
	   of an expression from a scalar type to an aggregate type or
	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
	   passed a partially uninitialized object or a view-conversion
	   to a larger size.  */
	must_force_mem = (offset
			  || mode1 == BLKmode
			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));

	/* Handle CONCAT first.  */
	if (GET_CODE (op0) == CONCAT && !must_force_mem)
	  {
	    if (bitpos == 0
		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
	      return op0;
	    if (bitpos == 0
		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		&& bitsize)
	      {
		op0 = XEXP (op0, 0);
		mode2 = GET_MODE (op0);
	      }
	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
		     && bitpos
		     && bitsize)
	      {
		op0 = XEXP (op0, 1);
		bitpos = 0;
		mode2 = GET_MODE (op0);
	      }
	    else
	      /* Otherwise force into memory.  */
	      must_force_mem = 1;
	  }
	/* If this is a constant, put it in a register if it is a legitimate
	   constant and we don't need a memory reference.  */
	if (CONSTANT_P (op0)
	    && mode2 != BLKmode
	    && targetm.legitimate_constant_p (mode2, op0)
	    && !must_force_mem)
	  op0 = force_reg (mode2, op0);

	/* Otherwise, if this is a constant, try to force it to the constant
	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
	   is a legitimate constant.  */
	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
	  op0 = validize_mem (memloc);

	/* Otherwise, if this is a constant or the object is not in memory
	   and needs to be, put it there.  */
	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
	  {
	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	    mem_attrs_from_type = true;
	  }

	if (offset)
	  {
	    enum machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

	    address_mode = get_address_mode (op0);
	    if (GET_MODE (offset_rtx) != address_mode)
	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

	    /* See the comment in expand_assignment for the rationale.  */
	    if (mode1 != VOIDmode
		&& bitpos != 0
		&& bitsize > 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);
	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& modifier != EXPAND_MEMORY)
	    /* If the bitfield is volatile and the bitsize
	       is narrower than the access size of the bitfield,
	       we need to extract bitfields from the access.  */
	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
		&& mode1 != BLKmode
		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && modifier != EXPAND_MEMORY
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 1, 1);

		/* ??? Unlike the similar test a few lines below, this one is
		   very likely obsolete.  */
		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    /* If we have nothing to extract, the result will be 0 for targets
	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
	       return 0 for the sake of consistency, as reading a zero-sized
	       bitfield is valid in Ada and the value is fully specified.  */
	    if (bitsize == 0)
	      return const0_rtx;

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  GET_MODE_BITSIZE (GET_MODE (op0))
				  - bitsize, op0, 1);
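
	    /* For illustration (an added sketch, not part of the original
	       GCC sources): on a big-endian target, extracting an 8-bit
	       record into an SImode register leaves the field in the
	       low-order byte, so the shift above moves it left by
	       32 - 8 == 24 bits into the high-order byte, where big-endian
	       record layout expects it.  */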
	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  */
	    if (mode == BLKmode)
	      {
		rtx new_rtx
		  = assign_stack_temp_for_type (ext_mode,
						GET_MODE_BITSIZE (ext_mode),
						type);
		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	/* If op0 is a temporary because of forcing to memory, pass only the
	   type to set_mem_attributes so that the original expression is never
	   marked as ADDRESSABLE through MEM_EXPR of the temporary.  */
	if (mem_attrs_from_type)
	  set_mem_attributes (op0, type, 0);
	else
	  set_mem_attributes (op0, exp, 0);

	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  enum machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp;
	  int volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos,
				   &offset, &mode1, &unsignedp, &volatilep,
				   true);
	  rtx orig_op0;

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr_real (tem,
				    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
					 != INTEGER_CST)
				     && modifier != EXPAND_STACK_PARM
				     ? target : NULL_RTX),
				    VOIDmode,
				    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
				    NULL, true);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}
      if (!op0)
	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
				NULL, inner_reference_p);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && (GET_MODE_PRECISION (mode)
		   == GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
				  TYPE_UNSIGNED (type), NULL_RTX,
				  mode, mode);
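
      /* For illustration (an added sketch, not part of the original GCC
	 sources): a VIEW_CONVERT_EXPR between a 32-bit float and a 32-bit
	 integer takes the gen_lowpart path above, reinterpreting the same
	 bits in the new mode without emitting conversion code, much like
	 the C idiom

	   union { float f; int i; } u;
	   u.f = x;
	   use (u.i);

	 but at the RTL level.  */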
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
	 output type is such that the operand is known to be aligned, indicate
	 that it is.  Otherwise, we need only be concerned about alignment for
	 non-BLKmode results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  if (TYPE_ALIGN_OK (type))
	    {
	      /* ??? Copying the MEM without substantially changing it might
		 run afoul of the code handling volatile memory references in
		 store_expr, which assumes that TARGET is returned unmodified
		 if it has been used.  */
	      op0 = copy_rtx (op0);
	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	    }
	  else if (modifier != EXPAND_WRITE
		   && modifier != EXPAND_MEMORY
		   && !inner_reference_p
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      /* If the target does have special handling for unaligned
		 loads of mode then use them.  */
	      if ((icode = optab_handler (movmisalign_optab, mode))
		  != CODE_FOR_nothing)
		{
		  rtx reg, insn;

		  op0 = adjust_address (op0, mode, 0);
		  /* We've already validated the memory, and we're creating a
		     new pseudo destination.  The predicates really can't
		     fail.  */
		  reg = gen_reg_rtx (mode);

		  /* Nor can the insn generator.  */
		  insn = GEN_FCN (icode) (reg, op0);
		  emit_insn (insn);
		  return reg;
		}
	      else if (STRICT_ALIGNMENT)
		{
		  tree inner_type = TREE_TYPE (treeop0);
		  HOST_WIDE_INT temp_size
		    = MAX (int_size_in_bytes (inner_type),
			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
		  rtx new_rtx
		    = assign_stack_temp_for_type (mode, temp_size, type);
		  rtx new_with_op0_mode
		    = adjust_address (new_rtx, GET_MODE (op0), 0);

		  gcc_assert (!TREE_ADDRESSABLE (exp));

		  if (GET_MODE (op0) == BLKmode)
		    emit_block_move (new_with_op0_mode, op0,
				     GEN_INT (GET_MODE_SIZE (mode)),
				     (modifier == EXPAND_STACK_PARM
				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
		  else
		    emit_move_insn (new_with_op0_mode, op0);

		  op0 = new_rtx;
		}
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label, -1);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }
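
      /* For illustration (an added sketch, not part of the original GCC
	 sources): given two one-bit fields in

	   struct s { unsigned a : 1, b : 1; } x;

	 the statement "x.a |= x.b;" is expanded as if it were
	 "if (x.b) x.a = 1;", and "x.a &= x.b;" as if it were
	 "if (!x.b) x.a = 0;", replacing a read-modify-write of the
	 destination bitfield with a test and a conditional store.  */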
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      enum machine_mode mode = GET_MODE (exp);
      rtx mask = immed_wide_int_const
	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
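
/* For illustration (an added sketch, not part of the original GCC
   sources): reducing an SImode value to a 5-bit unsigned bit-field
   type masks it with wi::mask (5, false, 32) == 0x1f; reducing to a
   5-bit signed type shifts left by 32 - 5 == 27 bits and
   arithmetically back down, propagating bit 4 through the upper 27
   bits.  */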
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
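
/* For illustration (an added sketch, not part of the original GCC
   sources): the tree pattern recognized above is the usual manual
   alignment idiom

     buf + (-(uintptr_t) &buf & (ALIGN - 1))

   where ALIGN is a power of 2 whose mask ALIGN - 1 exceeds
   BIGGEST_ALIGNMENT / BITS_PER_UNIT; the negated address ANDed with
   ALIGN - 1 is exactly the padding needed to round the address of BUF
   up to an ALIGN-byte boundary.  */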
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
	  || !init
	  || TREE_CODE (init) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (init)) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! tree_fits_uhwi_p (offset)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return init;
    }

  return 0;
}
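
/* For illustration (an added sketch, not part of the original GCC
   sources): for the C expression "abcdef" + 2 (a POINTER_PLUS_EXPR of
   an ADDR_EXPR of a STRING_CST), string_constant returns the
   STRING_CST for "abcdef" and sets *ptr_offset to (sizetype) 2, which
   lets callers fold accesses into the string at compile time.  */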
/* Generate code to calculate OPS, an exploded expression,
   using a store-flag instruction, and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
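
/* For illustration (an added sketch, not part of the original GCC
   sources): for "(x & 8) != 0" the single-bit transformation above
   expands to "(x >> 3) & 1", and for "(x & 8) == 0" to
   "((x >> 3) & 1) ^ 1", avoiding a store-flag (scc) instruction
   entirely.  */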
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    int default_probability)
{
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
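
/* For illustration (an added sketch, not part of the original GCC
   sources): for

     switch (x) { case 5: ... case 9: ... }

   MINVAL is 5 and RANGE is 4; a single unsigned comparison of the
   biased index x - 5 against 4 rejects at once all values below 5
   and above 9.  */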
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
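
/* For illustration (an added sketch, not part of the original GCC
   sources): with 4-byte table entries (CASE_VECTOR_MODE == SImode)
   the dispatch address computed above is

     table_label + index * 4

   i.e. a MULT by GET_MODE_SIZE and a PLUS of the table's LABEL_REF,
   from which the jump target is loaded and branched to.  */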
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
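
/* For illustration (an added sketch, not part of the original GCC
   sources): for a V4SImode VECTOR_CST {1, 2, 3, 4}, MODE is V4SImode,
   INNER is SImode, and the loop fills a 4-element rtvec with the four
   SImode integer constants before wrapping it in a CONST_VECTOR.  */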
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"