/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "tree-ssanames.h"
#include "common/common-target.h"
#include "diagnostic.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "target-globals.h"
#include "tree-ssa-address.h"
#include "cfgexpand.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
};
static void move_by_pieces_1 (insn_gen_fn, machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, machine_mode,
                                     tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        machine_mode, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
static void emit_single_push_insn (machine_mode, rtx, tree);
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
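
/* As a rough illustration of the cost test above (the numbers here are
   hypothetical, not taken from any particular machine description):
   copying a 16-byte structure with 64-bit alignment on a 64-bit target
   makes move_by_pieces_ninsns (16, 64, MOVE_MAX_PIECES + 1) count two
   word-mode moves, so MOVE_BY_PIECES_P (16, 64) is true whenever the
   target's MOVE_RATIO exceeds 2 and the copy is then expanded inline
   instead of going through the memcpy libcall.  */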
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat, mem, mem1, reg;
  machine_mode mode;
  int num_clobbers;
  int regno;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
               && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_REGNO (reg, regno);

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_PRECISION (to_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value;
      rtx_insn *insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */                    /* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
  {
    convert_optab ctab;

    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      ctab = trunc_optab;
    else if (unsignedp)
      ctab = zext_optab;
    else
      ctab = sext_optab;

    if (convert_optab_handler (ctab, to_mode, from_mode)
        != CODE_FOR_nothing)
      {
        emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
                        to, from, UNKNOWN);
        return;
      }
  }

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (icode, to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx_insn *insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (reg_overlap_mentioned_p (to, from))
            from = force_reg (from_mode, from);
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         the source does not overlap the target so force it into an isolated
         register when maybe so.  Likewise for any MEM input, since the
         conversion sequence might require several references to it and we
         must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
                                            LT, lowfrom, const0_rtx,
                                            lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          machine_mode intermediate;
          rtx tmp;
          int shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
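
/* Sketch of how the caller's UNSIGNEDP flag steers convert_move, assuming a
   hypothetical 32-bit to 64-bit integer widening: with unsignedp != 0 the
   equivalent code is ZERO_EXTEND and the upper word is filled with zeros;
   with unsignedp == 0 it is SIGN_EXTEND and the upper word is filled with
   copies of the sign bit (the multi-word path above computes exactly that
   fill value with emit_store_flag_force).  */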
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* If the caller did not tell us the old mode, then there is not
         much to do with respect to canonicalization.  We have to
         assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
        oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (std::make_pair (x, oldmode),
                                   GET_MODE_PRECISION (mode),
                                   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_CLASS (oldmode) == MODE_INT
      && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
          || (REG_P (x)
              && (!HARD_REGISTER_P (x)
                  || HARD_REGNO_MODE_OK (REGNO (x), mode))
              && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))

    return gen_lowpart (mode, x);

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
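
/* Worked example of the constant canonicalization above, assuming the usual
   8-bit QImode and 32-bit SImode: converting (const_int -1) from QImode to
   SImode yields (const_int 255) when UNSIGNEDP is nonzero (the value is
   zero-extended from the old precision) and (const_int -1) when UNSIGNEDP is
   zero (sign-extended).  */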
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode no wider than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static machine_mode
widest_int_mode_for_size (unsigned int size)
{
  machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
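
/* For example, on a hypothetical host with an 8-byte HOST_WIDE_INT and a
   target whose MOVE_MAX_PIECES is 16, this evaluates to MIN (16, 2 * 8),
   i.e. 16; if MOVE_MAX_PIECES were 4, it would be 4.  */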
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  machine_mode to_addr_mode;
  machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr_mode,
                                                            from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr_mode,
                                                          to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data.len > 0)
    {
      machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
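
/* A small worked example, assuming a fully aligned block on a target whose
   widest piece is 8 bytes and whose mov patterns all exist: for l == 11 the
   loop above charges 11/8 = 1 DImode move (leaving 3 bytes), then 3/2 = 1
   HImode move (leaving 1 byte), then a final QImode move, so the function
   returns 3.  */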
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
                  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->from_addr))));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->from_addr))));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move
   MAX_SIZE is the maximal size of block to move, if it can not be represented
   in unsigned HOST_WIDE_INT, then it is mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size,
                       unsigned HOST_WIDE_INT min_size,
                       unsigned HOST_WIDE_INT max_size,
                       unsigned HOST_WIDE_INT probable_max_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size,
                                       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
                                min, max, max);
}
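
/* Typical use, assuming a 32-byte aggregate assignment whose size is known
   at compile time: a caller passes the two BLKmode MEMs and the size, e.g.
   emit_block_move (to_rtx, from_rtx, GEN_INT (32), BLOCK_OP_NORMAL), and the
   hints wrapper above then knows min == max == 32 when choosing between
   move_by_pieces, a movmem pattern and the memcpy libcall.  */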
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size,
                            unsigned HOST_WIDE_INT min_size,
                            unsigned HOST_WIDE_INT max_size,
                            unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;
  machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
        expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
        expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label, REG_BR_PROB_BASE * 90 / 100);
}
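
/* The RTL emitted above behaves like the following C sketch (a purely
   illustrative rendering, with iter living in a fresh pseudo register):

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];            (one QImode copy through change_address)
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;    (predicted taken ~90% of the time)  */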
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx_insn *last;
  rtx pat;
#endif

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx_insn *last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
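
/* For instance, a value returned in two registers might be described by a
   PARALLEL of the shape (register numbers purely hypothetical):

       (parallel [(expr_list (reg:DI 0) (const_int 0))
                  (expr_list (reg:DI 1) (const_int 8))])

   gen_group_rtx keeps the modes and byte offsets but replaces each hard
   register with a fresh pseudo.  */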
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }
  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              machine_mode dest_mode = GET_MODE (dest);
              machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
                           VOIDmode, tmps[i]);
        }

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
               && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);

  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }

  return x;
}
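/* Illustrative sketch (editor's addition, not from the original sources):
   how a caller might use maybe_emit_group_store to obtain a single rtx
   from a call return value that some ABIs hand back as a PARALLEL.  The
   tree CALL_EXP is hypothetical; the #if 0 guard keeps the sketch out of
   the build.  */
#if 0
static rtx
expand_call_value_sketch (tree call_exp)
{
  /* expand_normal may yield a PARALLEL for multi-register returns.  */
  rtx raw = expand_normal (call_exp);

  /* Collapse it into a pseudo of the value's mode when necessary; if RAW
     is already an ordinary rtx it is returned unchanged.  */
  return maybe_emit_group_store (raw, TREE_TYPE (call_exp));
}
#endif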
2158 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2160 This is used on targets that return BLKmode values in registers. */
2163 copy_blkmode_from_reg (rtx target
, rtx srcreg
, tree type
)
2165 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2166 rtx src
= NULL
, dst
= NULL
;
2167 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2168 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
2169 machine_mode mode
= GET_MODE (srcreg
);
2170 machine_mode tmode
= GET_MODE (target
);
2171 machine_mode copy_mode
;
2173 /* BLKmode registers created in the back-end shouldn't have survived. */
2174 gcc_assert (mode
!= BLKmode
);
2176 /* If the structure doesn't take up a whole number of words, see whether
2177 SRCREG is padded on the left or on the right. If it's on the left,
2178 set PADDING_CORRECTION to the number of bits to skip.
2180 In most ABIs, the structure will be returned at the least end of
2181 the register, which translates to right padding on little-endian
2182 targets and left padding on big-endian targets. The opposite
2183 holds if the structure is returned at the most significant
2184 end of the register. */
2185 if (bytes
% UNITS_PER_WORD
!= 0
2186 && (targetm
.calls
.return_in_msb (type
)
2188 : BYTES_BIG_ENDIAN
))
2190 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2192 /* We can use a single move if we have an exact mode for the size. */
2193 else if (MEM_P (target
)
2194 && (!SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
2195 || MEM_ALIGN (target
) >= GET_MODE_ALIGNMENT (mode
))
2196 && bytes
== GET_MODE_SIZE (mode
))
2198 emit_move_insn (adjust_address (target
, mode
, 0), srcreg
);
2202 /* And if we additionally have the same mode for a register. */
2203 else if (REG_P (target
)
2204 && GET_MODE (target
) == mode
2205 && bytes
== GET_MODE_SIZE (mode
))
2207 emit_move_insn (target
, srcreg
);
2211 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2212 into a new pseudo which is a full word. */
2213 if (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
2215 srcreg
= convert_to_mode (word_mode
, srcreg
, TYPE_UNSIGNED (type
));
2219 /* Copy the structure BITSIZE bits at a time. If the target lives in
2220 memory, take care of not reading/writing past its end by selecting
2221 a copy mode suited to BITSIZE. This should always be possible given
2224 If the target lives in register, make sure not to select a copy mode
2225 larger than the mode of the register.
2227 We could probably emit more efficient code for machines which do not use
2228 strict alignment, but it doesn't seem worth the effort at the current
2231 copy_mode
= word_mode
;
2234 machine_mode mem_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
2235 if (mem_mode
!= BLKmode
)
2236 copy_mode
= mem_mode
;
2238 else if (REG_P (target
) && GET_MODE_BITSIZE (tmode
) < BITS_PER_WORD
)
2241 for (bitpos
= 0, xbitpos
= padding_correction
;
2242 bitpos
< bytes
* BITS_PER_UNIT
;
2243 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2245 /* We need a new source operand each time xbitpos is on a
2246 word boundary and when xbitpos == padding_correction
2247 (the first time through). */
2248 if (xbitpos
% BITS_PER_WORD
== 0 || xbitpos
== padding_correction
)
2249 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
, mode
);
2251 /* We need a new destination operand each time bitpos is on
2253 if (REG_P (target
) && GET_MODE_BITSIZE (tmode
) < BITS_PER_WORD
)
2255 else if (bitpos
% BITS_PER_WORD
== 0)
2256 dst
= operand_subword (target
, bitpos
/ BITS_PER_WORD
, 1, tmode
);
2258 /* Use xbitpos for the source extraction (right justified) and
2259 bitpos for the destination store (left justified). */
2260 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, 0, 0, copy_mode
,
2261 extract_bit_field (src
, bitsize
,
2262 xbitpos
% BITS_PER_WORD
, 1,
2263 NULL_RTX
, copy_mode
, copy_mode
));
2267 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2268 register if it contains any data, otherwise return null.
2270 This is used on targets that return BLKmode values in registers. */
2273 copy_blkmode_to_reg (machine_mode mode
, tree src
)
2276 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0, bytes
;
2277 unsigned int bitsize
;
2278 rtx
*dst_words
, dst
, x
, src_word
= NULL_RTX
, dst_word
= NULL_RTX
;
2279 machine_mode dst_mode
;
2281 gcc_assert (TYPE_MODE (TREE_TYPE (src
)) == BLKmode
);
2283 x
= expand_normal (src
);
2285 bytes
= int_size_in_bytes (TREE_TYPE (src
));
2289 /* If the structure doesn't take up a whole number of words, see
2290 whether the register value should be padded on the left or on
2291 the right. Set PADDING_CORRECTION to the number of padding
2292 bits needed on the left side.
2294 In most ABIs, the structure will be returned at the least end of
2295 the register, which translates to right padding on little-endian
2296 targets and left padding on big-endian targets. The opposite
2297 holds if the structure is returned at the most significant
2298 end of the register. */
2299 if (bytes
% UNITS_PER_WORD
!= 0
2300 && (targetm
.calls
.return_in_msb (TREE_TYPE (src
))
2302 : BYTES_BIG_ENDIAN
))
2303 padding_correction
= (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
)
2306 n_regs
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
2307 dst_words
= XALLOCAVEC (rtx
, n_regs
);
2308 bitsize
= MIN (TYPE_ALIGN (TREE_TYPE (src
)), BITS_PER_WORD
);
2310 /* Copy the structure BITSIZE bits at a time. */
2311 for (bitpos
= 0, xbitpos
= padding_correction
;
2312 bitpos
< bytes
* BITS_PER_UNIT
;
2313 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2315 /* We need a new destination pseudo each time xbitpos is
2316 on a word boundary and when xbitpos == padding_correction
2317 (the first time through). */
2318 if (xbitpos
% BITS_PER_WORD
== 0
2319 || xbitpos
== padding_correction
)
2321 /* Generate an appropriate register. */
2322 dst_word
= gen_reg_rtx (word_mode
);
2323 dst_words
[xbitpos
/ BITS_PER_WORD
] = dst_word
;
2325 /* Clear the destination before we move anything into it. */
2326 emit_move_insn (dst_word
, CONST0_RTX (word_mode
));
2329 /* We need a new source operand each time bitpos is on a word
2331 if (bitpos
% BITS_PER_WORD
== 0)
2332 src_word
= operand_subword_force (x
, bitpos
/ BITS_PER_WORD
, BLKmode
);
2334 /* Use bitpos for the source extraction (left justified) and
2335 xbitpos for the destination store (right justified). */
2336 store_bit_field (dst_word
, bitsize
, xbitpos
% BITS_PER_WORD
,
2338 extract_bit_field (src_word
, bitsize
,
2339 bitpos
% BITS_PER_WORD
, 1,
2340 NULL_RTX
, word_mode
, word_mode
));
2343 if (mode
== BLKmode
)
2345 /* Find the smallest integer mode large enough to hold the
2346 entire structure. */
2347 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2349 mode
= GET_MODE_WIDER_MODE (mode
))
2350 /* Have we found a large enough mode? */
2351 if (GET_MODE_SIZE (mode
) >= bytes
)
2354 /* A suitable mode should have been found. */
2355 gcc_assert (mode
!= VOIDmode
);
2358 if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2359 dst_mode
= word_mode
;
2362 dst
= gen_reg_rtx (dst_mode
);
2364 for (i
= 0; i
< n_regs
; i
++)
2365 emit_move_insn (operand_subword (dst
, i
, 0, dst_mode
), dst_words
[i
]);
2367 if (mode
!= dst_mode
)
2368 dst
= gen_lowpart (mode
, dst
);
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
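/* Illustrative sketch (editor's addition, not from the original sources):
   building up a CALL_INSN_FUNCTION_USAGE list with the helpers above.
   The register numbers are hypothetical; real callers take them from the
   target's calling conventions.  Guarded so it does not affect the build.  */
#if 0
static void
record_call_usage_sketch (void)
{
  rtx call_fusage = NULL_RTX;

  /* Mark one hard register as used by the call...  */
  use_reg (&call_fusage, gen_rtx_REG (Pmode, 0));

  /* ...and a block of two consecutive hard registers starting at 4.  */
  use_regs (&call_fusage, 4, 2);

  /* CALL_FUSAGE would then be attached to the call insn via
     CALL_INSN_FUNCTION_USAGE.  */
}
#endif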
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

#ifdef HAVE_conditional_move
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
#endif
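/* Illustrative sketch (editor's addition, not from the original sources):
   the typical way expansion code uses get_def_for_expr, here to detect
   that an SSA operand was defined by a multiplication.  OP0 is a
   hypothetical tree operand.  Guarded so it does not affect the build.  */
#if 0
static bool
feeds_from_mult_sketch (tree op0)
{
  gimple def_stmt = get_def_for_expr (op0, MULT_EXPR);

  if (!def_stmt)
    return false;

  /* The factors of the multiplication are then available as the RHS
     operands of the defining statement.  */
  tree factor0 = gimple_assign_rhs1 (def_stmt);
  tree factor1 = gimple_assign_rhs2 (def_stmt);
  return factor0 != NULL_TREE && factor1 != NULL_TREE;
}
#endif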
2475 /* Determine whether the LEN bytes generated by CONSTFUN can be
2476 stored to memory using several move instructions. CONSTFUNDATA is
2477 a pointer which will be passed as argument in every CONSTFUN call.
2478 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2479 a memset operation and false if it's a copy of a constant string.
2480 Return nonzero if a call to store_by_pieces should succeed. */
2483 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
2484 rtx (*constfun
) (void *, HOST_WIDE_INT
, machine_mode
),
2485 void *constfundata
, unsigned int align
, bool memsetp
)
2487 unsigned HOST_WIDE_INT l
;
2488 unsigned int max_size
;
2489 HOST_WIDE_INT offset
= 0;
2491 enum insn_code icode
;
2493 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2494 rtx cst ATTRIBUTE_UNUSED
;
2500 ? SET_BY_PIECES_P (len
, align
)
2501 : STORE_BY_PIECES_P (len
, align
)))
2504 align
= alignment_for_piecewise_move (STORE_MAX_PIECES
, align
);
2506 /* We would first store what we can in the largest integer mode, then go to
2507 successively smaller modes. */
2510 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2514 max_size
= STORE_MAX_PIECES
+ 1;
2515 while (max_size
> 1 && l
> 0)
2517 mode
= widest_int_mode_for_size (max_size
);
2519 if (mode
== VOIDmode
)
2522 icode
= optab_handler (mov_optab
, mode
);
2523 if (icode
!= CODE_FOR_nothing
2524 && align
>= GET_MODE_ALIGNMENT (mode
))
2526 unsigned int size
= GET_MODE_SIZE (mode
);
2533 cst
= (*constfun
) (constfundata
, offset
, mode
);
2534 if (!targetm
.legitimate_constant_p (mode
, cst
))
2544 max_size
= GET_MODE_SIZE (mode
);
2547 /* The code above should have handled everything. */
2554 /* Generate several move instructions to store LEN bytes generated by
2555 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2556 pointer which will be passed as argument in every CONSTFUN call.
2557 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2558 a memset operation and false if it's a copy of a constant string.
2559 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2560 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2564 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
2565 rtx (*constfun
) (void *, HOST_WIDE_INT
, machine_mode
),
2566 void *constfundata
, unsigned int align
, bool memsetp
, int endp
)
2568 machine_mode to_addr_mode
= get_address_mode (to
);
2569 struct store_by_pieces_d data
;
2573 gcc_assert (endp
!= 2);
2578 ? SET_BY_PIECES_P (len
, align
)
2579 : STORE_BY_PIECES_P (len
, align
));
2580 data
.constfun
= constfun
;
2581 data
.constfundata
= constfundata
;
2584 store_by_pieces_1 (&data
, align
);
2589 gcc_assert (!data
.reverse
);
2594 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2595 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2597 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
2598 plus_constant (to_addr_mode
,
2602 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2609 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
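/* Illustrative sketch (editor's addition, not from the original sources):
   the CONSTFUN callback protocol shared by can_store_by_pieces and
   store_by_pieces, mirroring clear_by_pieces_1 above.  TO, LEN and ALIGN
   are hypothetical values a caller would already have.  Guarded so it
   does not affect the build.  */
#if 0
/* Return the constant to store at OFFSET in mode MODE; here always zero.  */
static rtx
zero_constfun_sketch (void *data ATTRIBUTE_UNUSED,
                      HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                      machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
clear_block_sketch (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  /* Ask first whether a piecewise store is possible, then emit it.  */
  if (can_store_by_pieces (len, zero_constfun_sketch, NULL, align, true))
    store_by_pieces (to, len, zero_constfun_sketch, NULL, align, true, 0);
}
#endif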
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  machine_mode to_addr_mode = get_address_mode (data->to);
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_to_mode_reg (to_addr_mode,
                                            plus_constant (to_addr_mode,
                                                           to_addr,
                                                           data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if (!data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data->len > 0)
    {
      machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
                   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size,
                     unsigned HOST_WIDE_INT min_size,
                     unsigned HOST_WIDE_INT max_size,
                     unsigned HOST_WIDE_INT probable_max_size)
{
  machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size,
                                   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
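/* Illustrative sketch (editor's addition, not from the original sources):
   zeroing a 64-byte BLKmode stack temporary with clear_storage.  The size
   is a hypothetical constant; callers normally compute it from a type.
   Guarded so it does not affect the build.  */
#if 0
static void
zero_stack_temp_sketch (void)
{
  /* A 64-byte scratch block on the stack.  */
  rtx blk = assign_stack_temp (BLKmode, 64);

  /* Emit whatever sequence is cheapest: by-pieces stores, a setmem
     pattern, or a call to memset.  */
  clear_storage (blk, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif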
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into new pseudos into a VAR_DECL and use them later.  */

  object = copy_addr_to_reg (XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
    }

  return block_clear_fn;
}
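/* Illustrative sketch (editor's addition, not from the original sources):
   how the block-clear helper could be redirected to a differently named
   routine by code that does not want the standard memset.  The assembler
   name "__custom_memset" is hypothetical.  Guarded so it does not affect
   the build.  */
#if 0
static void
rename_block_clear_sketch (void)
{
  /* Build the decl (if needed) and override its assembler name.  */
  init_block_clear_fn ("__custom_memset");

  /* Later requests hand back the same decl.  */
  tree fn = clear_storage_libcall_fn (true);
  gcc_assert (fn == block_clear_fn);
}
#endif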
2932 /* Expand a setmem pattern; return true if successful. */
2935 set_storage_via_setmem (rtx object
, rtx size
, rtx val
, unsigned int align
,
2936 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
2937 unsigned HOST_WIDE_INT min_size
,
2938 unsigned HOST_WIDE_INT max_size
,
2939 unsigned HOST_WIDE_INT probable_max_size
)
2941 /* Try the most limited insn first, because there's no point
2942 including more than one in the machine description unless
2943 the more limited one has some advantage. */
2947 if (expected_align
< align
)
2948 expected_align
= align
;
2949 if (expected_size
!= -1)
2951 if ((unsigned HOST_WIDE_INT
)expected_size
> max_size
)
2952 expected_size
= max_size
;
2953 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
2954 expected_size
= min_size
;
2957 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2958 mode
= GET_MODE_WIDER_MODE (mode
))
2960 enum insn_code code
= direct_optab_handler (setmem_optab
, mode
);
2962 if (code
!= CODE_FOR_nothing
2963 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2964 here because if SIZE is less than the mode mask, as it is
2965 returned by the macro, it will definitely be less than the
2966 actual mode mask. Since SIZE is within the Pmode address
2967 space, we limit MODE to Pmode. */
2968 && ((CONST_INT_P (size
)
2969 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2970 <= (GET_MODE_MASK (mode
) >> 1)))
2971 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
2972 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
2974 struct expand_operand ops
[9];
2977 nops
= insn_data
[(int) code
].n_generator_args
;
2978 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
2980 create_fixed_operand (&ops
[0], object
);
2981 /* The check above guarantees that this size conversion is valid. */
2982 create_convert_operand_to (&ops
[1], size
, mode
, true);
2983 create_convert_operand_from (&ops
[2], val
, byte_mode
, true);
2984 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
2987 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
2988 create_integer_operand (&ops
[5], expected_size
);
2992 create_integer_operand (&ops
[6], min_size
);
2993 /* If we can not represent the maximal size,
2994 make parameter NULL. */
2995 if ((HOST_WIDE_INT
) max_size
!= -1)
2996 create_integer_operand (&ops
[7], max_size
);
2998 create_fixed_operand (&ops
[7], NULL
);
3002 /* If we can not represent the maximal size,
3003 make parameter NULL. */
3004 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
3005 create_integer_operand (&ops
[8], probable_max_size
);
3007 create_fixed_operand (&ops
[8], NULL
);
3009 if (maybe_expand_insn (code
, nops
, ops
))
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  machine_mode cmode;
  machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

static rtx
read_complex_part (rtx cplx, bool imag_p)
{
  machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode);
}
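/* Illustrative sketch (editor's addition, not from the original sources):
   pairing write_complex_part and read_complex_part to assemble a complex
   pseudo from two scalar parts.  RE and IM are hypothetical SFmode values.
   Guarded so it does not affect the build.  */
#if 0
static rtx
build_complex_sketch (rtx re, rtx im)
{
  rtx c = gen_reg_rtx (SCmode);

  write_complex_part (c, re, false);	/* real part */
  write_complex_part (c, im, true);	/* imaginary part */

  /* Either component can be pulled back out later.  */
  rtx re_again = read_complex_part (c, false);
  gcc_assert (GET_MODE (re_again) == SFmode);

  return c;
}
#endif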
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (machine_mode new_mode,
                       machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
3218 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3219 Return an equivalent MEM that does not use an auto-increment. */
3222 emit_move_resolve_push (machine_mode mode
, rtx x
)
3224 enum rtx_code code
= GET_CODE (XEXP (x
, 0));
3225 HOST_WIDE_INT adjust
;
3228 adjust
= GET_MODE_SIZE (mode
);
3229 #ifdef PUSH_ROUNDING
3230 adjust
= PUSH_ROUNDING (adjust
);
3232 if (code
== PRE_DEC
|| code
== POST_DEC
)
3234 else if (code
== PRE_MODIFY
|| code
== POST_MODIFY
)
3236 rtx expr
= XEXP (XEXP (x
, 0), 1);
3239 gcc_assert (GET_CODE (expr
) == PLUS
|| GET_CODE (expr
) == MINUS
);
3240 gcc_assert (CONST_INT_P (XEXP (expr
, 1)));
3241 val
= INTVAL (XEXP (expr
, 1));
3242 if (GET_CODE (expr
) == MINUS
)
3244 gcc_assert (adjust
== val
|| adjust
== -val
);
3248 /* Do not use anti_adjust_stack, since we don't want to update
3249 stack_pointer_delta. */
3250 temp
= expand_simple_binop (Pmode
, PLUS
, stack_pointer_rtx
,
3251 gen_int_mode (adjust
, Pmode
), stack_pointer_rtx
,
3252 0, OPTAB_LIB_WIDEN
);
3253 if (temp
!= stack_pointer_rtx
)
3254 emit_move_insn (stack_pointer_rtx
, temp
);
3261 temp
= stack_pointer_rtx
;
3266 temp
= plus_constant (Pmode
, stack_pointer_rtx
, -adjust
);
3272 return replace_equiv_address (x
, temp
);
3275 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3276 X is known to satisfy push_operand, and MODE is known to be complex.
3277 Returns the last instruction emitted. */
3280 emit_move_complex_push (machine_mode mode
, rtx x
, rtx y
)
3282 machine_mode submode
= GET_MODE_INNER (mode
);
3285 #ifdef PUSH_ROUNDING
3286 unsigned int submodesize
= GET_MODE_SIZE (submode
);
3288 /* In case we output to the stack, but the size is smaller than the
3289 machine can push exactly, we need to use move instructions. */
3290 if (PUSH_ROUNDING (submodesize
) != submodesize
)
3292 x
= emit_move_resolve_push (mode
, x
);
3293 return emit_move_insn (x
, y
);
3297 /* Note that the real part always precedes the imag part in memory
3298 regardless of machine's endianness. */
3299 switch (GET_CODE (XEXP (x
, 0)))
3313 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3314 read_complex_part (y
, imag_first
));
3315 return emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3316 read_complex_part (y
, !imag_first
));
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
3338 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3339 MODE is known to be complex. Returns the last instruction emitted. */
3342 emit_move_complex (machine_mode mode
, rtx x
, rtx y
)
3346 /* Need to take special care for pushes, to maintain proper ordering
3347 of the data, and possibly extra padding. */
3348 if (push_operand (x
, mode
))
3349 return emit_move_complex_push (mode
, x
, y
);
3351 /* See if we can coerce the target into moving both values at once, except
3352 for floating point where we favor moving as parts if this is easy. */
3353 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
3354 && optab_handler (mov_optab
, GET_MODE_INNER (mode
)) != CODE_FOR_nothing
3356 && HARD_REGISTER_P (x
)
3357 && hard_regno_nregs
[REGNO (x
)][mode
] == 1)
3359 && HARD_REGISTER_P (y
)
3360 && hard_regno_nregs
[REGNO (y
)][mode
] == 1))
3362 /* Not possible if the values are inherently not adjacent. */
3363 else if (GET_CODE (x
) == CONCAT
|| GET_CODE (y
) == CONCAT
)
3365 /* Is possible if both are registers (or subregs of registers). */
3366 else if (register_operand (x
, mode
) && register_operand (y
, mode
))
3368 /* If one of the operands is a memory, and alignment constraints
3369 are friendly enough, we may be able to do combined memory operations.
3370 We do not attempt this if Y is a constant because that combination is
3371 usually better with the by-parts thing below. */
3372 else if ((MEM_P (x
) ? !CONSTANT_P (y
) : MEM_P (y
))
3373 && (!STRICT_ALIGNMENT
3374 || get_mode_alignment (mode
) == BIGGEST_ALIGNMENT
))
3383 /* For memory to memory moves, optimal behavior can be had with the
3384 existing block move logic. */
3385 if (MEM_P (x
) && MEM_P (y
))
3387 emit_block_move (x
, y
, GEN_INT (GET_MODE_SIZE (mode
)),
3388 BLOCK_OP_NO_LIBCALL
);
3389 return get_last_insn ();
3392 ret
= emit_move_via_integer (mode
, x
, y
, true);
3397 return emit_move_complex_parts (x
, y
);
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  machine_mode innermode, innermostmode;
  int offset;

  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
                        - GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;

  return false;
}
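/* Illustrative sketch (editor's addition, not from the original sources):
   a paradoxical subreg whose high word is undefined.  On a hypothetical
   target with 32-bit words, word 1 of (subreg:DI (reg:SI) 0) does not come
   from the inner register at all, so the multi-word move code below can
   skip it.  Guarded so it does not affect the build.  */
#if 0
static void
paradoxical_subword_sketch (void)
{
  rtx inner = gen_reg_rtx (SImode);
  rtx para = gen_rtx_SUBREG (DImode, inner, 0);	/* paradoxical subreg */

  /* Word 0 overlaps the SImode value; word 1 lies in the undefined
     extension, so no move needs to be emitted for it.  */
  gcc_assert (!undefined_operand_subword_p (para, 0));
  gcc_assert (undefined_operand_subword_p (para, 1));
}
#endif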
3458 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3459 MODE is any multi-word or full-word mode that lacks a move_insn
3460 pattern. Note that you will get better code if you define such
3461 patterns, even if they must turn into multiple assembler instructions. */
3464 emit_move_multi_word (machine_mode mode
, rtx x
, rtx y
)
3466 rtx_insn
*last_insn
= 0;
3472 gcc_assert (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
);
3474 /* If X is a push on the stack, do the push now and replace
3475 X with a reference to the stack pointer. */
3476 if (push_operand (x
, mode
))
3477 x
= emit_move_resolve_push (mode
, x
);
3479 /* If we are in reload, see if either operand is a MEM whose address
3480 is scheduled for replacement. */
3481 if (reload_in_progress
&& MEM_P (x
)
3482 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3483 x
= replace_equiv_address_nv (x
, inner
);
3484 if (reload_in_progress
&& MEM_P (y
)
3485 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3486 y
= replace_equiv_address_nv (y
, inner
);
3490 need_clobber
= false;
3492 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3495 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3498 /* Do not generate code for a move if it would come entirely
3499 from the undefined bits of a paradoxical subreg. */
3500 if (undefined_operand_subword_p (y
, i
))
3503 ypart
= operand_subword (y
, i
, 1, mode
);
3505 /* If we can't get a part of Y, put Y into memory if it is a
3506 constant. Otherwise, force it into a register. Then we must
3507 be able to get a part of Y. */
3508 if (ypart
== 0 && CONSTANT_P (y
))
3510 y
= use_anchored_address (force_const_mem (mode
, y
));
3511 ypart
= operand_subword (y
, i
, 1, mode
);
3513 else if (ypart
== 0)
3514 ypart
= operand_subword_force (y
, i
, mode
);
3516 gcc_assert (xpart
&& ypart
);
3518 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3520 last_insn
= emit_move_insn (xpart
, ypart
);
3526 /* Show the output dies here. This is necessary for SUBREGs
3527 of pseudos since we cannot track their lifetimes correctly;
3528 hard regs shouldn't appear here except as return values.
3529 We never want to emit such a clobber after reload. */
3531 && ! (reload_in_progress
|| reload_completed
)
3532 && need_clobber
!= 0)
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
        {
          if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
            return ret;
        }
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx_insn *
emit_move_insn (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx_insn *last_insn;
  rtx set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                         MEM_ADDR_SPACE (x))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
                                        MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
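/* Illustrative sketch (editor's addition, not from the original sources):
   the most common use of emit_move_insn, loading a VOIDmode constant into
   a fresh SImode pseudo.  Guarded so it does not affect the build.  */
#if 0
static rtx
load_constant_sketch (void)
{
  rtx tmp = gen_reg_rtx (SImode);

  /* Y may be a CONST_INT (VOIDmode); emit_move_insn picks the right move
     pattern, spilling to the constant pool if necessary.  */
  emit_move_insn (tmp, GEN_INT (42));
  return tmp;
}
#endif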
3660 /* If Y is representable exactly in a narrower mode, and the target can
3661 perform the extension directly from constant or memory, then emit the
3662 move as an extension. */
3665 compress_float_constant (rtx x
, rtx y
)
3667 machine_mode dstmode
= GET_MODE (x
);
3668 machine_mode orig_srcmode
= GET_MODE (y
);
3669 machine_mode srcmode
;
3671 int oldcost
, newcost
;
3672 bool speed
= optimize_insn_for_speed_p ();
3674 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3676 if (targetm
.legitimate_constant_p (dstmode
, y
))
3677 oldcost
= set_src_cost (y
, speed
);
3679 oldcost
= set_src_cost (force_const_mem (dstmode
, y
), speed
);
3681 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3682 srcmode
!= orig_srcmode
;
3683 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3687 rtx_insn
*last_insn
;
3689 /* Skip if the target can't extend this way. */
3690 ic
= can_extend_p (dstmode
, srcmode
, 0);
3691 if (ic
== CODE_FOR_nothing
)
3694 /* Skip if the narrowed value isn't exact. */
3695 if (! exact_real_truncate (srcmode
, &r
))
3698 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3700 if (targetm
.legitimate_constant_p (srcmode
, trunc_y
))
3702 /* Skip if the target needs extra instructions to perform
3704 if (!insn_operand_matches (ic
, 1, trunc_y
))
3706 /* This is valid, but may not be cheaper than the original. */
3707 newcost
= set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
),
3709 if (oldcost
< newcost
)
3712 else if (float_extend_from_mem
[dstmode
][srcmode
])
3714 trunc_y
= force_const_mem (srcmode
, trunc_y
);
3715 /* This is valid, but may not be cheaper than the original. */
3716 newcost
= set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
),
3718 if (oldcost
< newcost
)
3720 trunc_y
= validize_mem (trunc_y
);
3725 /* For CSE's benefit, force the compressed constant pool entry
3726 into a new pseudo. This constant may be used in different modes,
3727 and if not, combine will put things back together for us. */
3728 trunc_y
= force_reg (srcmode
, trunc_y
);
3730 /* If x is a hard register, perform the extension into a pseudo,
3731 so that e.g. stack realignment code is aware of it. */
3733 if (REG_P (x
) && HARD_REGISTER_P (x
))
3734 target
= gen_reg_rtx (dstmode
);
3736 emit_unop_insn (ic
, target
, trunc_y
, UNKNOWN
);
3737 last_insn
= get_last_insn ();
3740 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3743 return emit_move_insn (x
, target
);
3750 /* Pushing data onto the stack. */
3752 /* Push a block of length SIZE (perhaps variable)
3753 and return an rtx to address the beginning of the block.
3754 The value may be virtual_outgoing_args_rtx.
3756 EXTRA is the number of bytes of padding to push in addition to SIZE.
3757 BELOW nonzero means this padding comes at low addresses;
3758 otherwise, the padding comes at high addresses. */
3761 push_block (rtx size
, int extra
, int below
)
3765 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3766 if (CONSTANT_P (size
))
3767 anti_adjust_stack (plus_constant (Pmode
, size
, extra
));
3768 else if (REG_P (size
) && extra
== 0)
3769 anti_adjust_stack (size
);
3772 temp
= copy_to_mode_reg (Pmode
, size
);
3774 temp
= expand_binop (Pmode
, add_optab
, temp
,
3775 gen_int_mode (extra
, Pmode
),
3776 temp
, 0, OPTAB_LIB_WIDEN
);
3777 anti_adjust_stack (temp
);
3780 #ifndef STACK_GROWS_DOWNWARD
3786 temp
= virtual_outgoing_args_rtx
;
3787 if (extra
!= 0 && below
)
3788 temp
= plus_constant (Pmode
, temp
, extra
);
3792 if (CONST_INT_P (size
))
3793 temp
= plus_constant (Pmode
, virtual_outgoing_args_rtx
,
3794 -INTVAL (size
) - (below
? 0 : extra
));
3795 else if (extra
!= 0 && !below
)
3796 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3797 negate_rtx (Pmode
, plus_constant (Pmode
, size
,
3800 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3801 negate_rtx (Pmode
, size
));
3804 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL_RTX;
}
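/* Illustrative sketch (editor's addition, not from the original sources):
   what mem_autoinc_base returns for a push-style memory reference.
   Guarded so it does not affect the build.  */
#if 0
static void
autoinc_base_sketch (void)
{
  /* (mem:SI (pre_dec (reg sp))) -- the form emitted for a push.  */
  rtx push_mem
    = gen_rtx_MEM (SImode, gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));

  /* The base of the auto-increment is the stack pointer itself.  */
  gcc_assert (mem_autoinc_base (push_mem) == stack_pointer_rtx);

  /* A non-MEM operand has no auto-increment base.  */
  gcc_assert (mem_autoinc_base (gen_reg_rtx (SImode)) == NULL_RTX);
}
#endif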
3821 /* A utility routine used here, in reload, and in try_split. The insns
3822 after PREV up to and including LAST are known to adjust the stack,
3823 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3824 placing notes as appropriate. PREV may be NULL, indicating the
3825 entire insn sequence prior to LAST should be scanned.
3827 The set of allowed stack pointer modifications is small:
3828 (1) One or more auto-inc style memory references (aka pushes),
3829 (2) One or more addition/subtraction with the SP as destination,
3830 (3) A single move insn with the SP as destination,
3831 (4) A call_pop insn,
3832 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3834 Insns in the sequence that do not modify the SP are ignored,
3835 except for noreturn calls.
3837 The return value is the amount of adjustment that can be trivially
3838 verified, via immediate operand or auto-inc. If the adjustment
3839 cannot be trivially extracted, the return value is INT_MIN. */
3842 find_args_size_adjust (rtx_insn
*insn
)
3847 pat
= PATTERN (insn
);
3850 /* Look for a call_pop pattern. */
3853 /* We have to allow non-call_pop patterns for the case
3854 of emit_single_push_insn of a TLS address. */
3855 if (GET_CODE (pat
) != PARALLEL
)
3858 /* All call_pop have a stack pointer adjust in the parallel.
3859 The call itself is always first, and the stack adjust is
3860 usually last, so search from the end. */
3861 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; --i
)
3863 set
= XVECEXP (pat
, 0, i
);
3864 if (GET_CODE (set
) != SET
)
3866 dest
= SET_DEST (set
);
3867 if (dest
== stack_pointer_rtx
)
3870 /* We'd better have found the stack pointer adjust. */
3873 /* Fall through to process the extracted SET and DEST
3874 as if it was a standalone insn. */
3876 else if (GET_CODE (pat
) == SET
)
3878 else if ((set
= single_set (insn
)) != NULL
)
3880 else if (GET_CODE (pat
) == PARALLEL
)
3882 /* ??? Some older ports use a parallel with a stack adjust
3883 and a store for a PUSH_ROUNDING pattern, rather than a
3884 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3885 /* ??? See h8300 and m68k, pushqi1. */
3886 for (i
= XVECLEN (pat
, 0) - 1; i
>= 0; --i
)
3888 set
= XVECEXP (pat
, 0, i
);
3889 if (GET_CODE (set
) != SET
)
3891 dest
= SET_DEST (set
);
3892 if (dest
== stack_pointer_rtx
)
3895 /* We do not expect an auto-inc of the sp in the parallel. */
3896 gcc_checking_assert (mem_autoinc_base (dest
) != stack_pointer_rtx
);
3897 gcc_checking_assert (mem_autoinc_base (SET_SRC (set
))
3898 != stack_pointer_rtx
);
3906 dest
= SET_DEST (set
);
3908 /* Look for direct modifications of the stack pointer. */
3909 if (REG_P (dest
) && REGNO (dest
) == STACK_POINTER_REGNUM
)
3911 /* Look for a trivial adjustment, otherwise assume nothing. */
3912 /* Note that the SPU restore_stack_block pattern refers to
3913 the stack pointer in V4SImode. Consider that non-trivial. */
3914 if (SCALAR_INT_MODE_P (GET_MODE (dest
))
3915 && GET_CODE (SET_SRC (set
)) == PLUS
3916 && XEXP (SET_SRC (set
), 0) == stack_pointer_rtx
3917 && CONST_INT_P (XEXP (SET_SRC (set
), 1)))
3918 return INTVAL (XEXP (SET_SRC (set
), 1));
3919 /* ??? Reload can generate no-op moves, which will be cleaned
3920 up later. Recognize it and continue searching. */
3921 else if (rtx_equal_p (dest
, SET_SRC (set
)))
3924 return HOST_WIDE_INT_MIN
;
3930 /* Otherwise only think about autoinc patterns. */
3931 if (mem_autoinc_base (dest
) == stack_pointer_rtx
)
3934 gcc_checking_assert (mem_autoinc_base (SET_SRC (set
))
3935 != stack_pointer_rtx
);
3937 else if (mem_autoinc_base (SET_SRC (set
)) == stack_pointer_rtx
)
3938 mem
= SET_SRC (set
);
3942 addr
= XEXP (mem
, 0);
3943 switch (GET_CODE (addr
))
3947 return GET_MODE_SIZE (GET_MODE (mem
));
3950 return -GET_MODE_SIZE (GET_MODE (mem
));
3953 addr
= XEXP (addr
, 1);
3954 gcc_assert (GET_CODE (addr
) == PLUS
);
3955 gcc_assert (XEXP (addr
, 0) == stack_pointer_rtx
);
3956 gcc_assert (CONST_INT_P (XEXP (addr
, 1)));
3957 return INTVAL (XEXP (addr
, 1));
int
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
        {
          if (!CALL_P (insn)
              || ACCUMULATE_OUTGOING_ARGS
              || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
            continue;
        }

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
        saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
#ifdef STACK_GROWS_DOWNWARD
      this_delta = -(unsigned HOST_WIDE_INT) this_delta;
#endif
      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}
4001 #ifdef PUSH_ROUNDING
4002 /* Emit single push insn. */
4005 emit_single_push_insn_1 (machine_mode mode
, rtx x
, tree type
)
4008 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
4010 enum insn_code icode
;
4012 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
4013 /* If there is push pattern, use it. Otherwise try old way of throwing
4014 MEM representing push operation to move expander. */
4015 icode
= optab_handler (push_optab
, mode
);
4016 if (icode
!= CODE_FOR_nothing
)
4018 struct expand_operand ops
[1];
4020 create_input_operand (&ops
[0], x
, mode
);
4021 if (maybe_expand_insn (icode
, 1, ops
))
4024 if (GET_MODE_SIZE (mode
) == rounded_size
)
4025 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
4026 /* If we are to pad downward, adjust the stack pointer first and
4027 then store X into the stack location using an offset. This is
4028 because emit_move_insn does not know how to pad; it does not have
4030 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
4032 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
4033 HOST_WIDE_INT offset
;
4035 emit_move_insn (stack_pointer_rtx
,
4036 expand_binop (Pmode
,
4037 #ifdef STACK_GROWS_DOWNWARD
4043 gen_int_mode (rounded_size
, Pmode
),
4044 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
4046 offset
= (HOST_WIDE_INT
) padding_size
;
4047 #ifdef STACK_GROWS_DOWNWARD
4048 if (STACK_PUSH_CODE
== POST_DEC
)
4049 /* We have already decremented the stack pointer, so get the
4051 offset
+= (HOST_WIDE_INT
) rounded_size
;
4053 if (STACK_PUSH_CODE
== POST_INC
)
4054 /* We have already incremented the stack pointer, so get the
4056 offset
-= (HOST_WIDE_INT
) rounded_size
;
4058 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
4059 gen_int_mode (offset
, Pmode
));
4063 #ifdef STACK_GROWS_DOWNWARD
4064 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4065 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
4066 gen_int_mode (-(HOST_WIDE_INT
) rounded_size
,
4069 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4070 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
4071 gen_int_mode (rounded_size
, Pmode
));
4073 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
4076 dest
= gen_rtx_MEM (mode
, dest_addr
);
4080 set_mem_attributes (dest
, type
, 1);
4082 if (cfun
->tail_call_marked
)
4083 /* Function incoming arguments may overlap with sibling call
4084 outgoing arguments and we cannot allow reordering of reads
4085 from function arguments with stores to outgoing arguments
4086 of sibling calls. */
4087 set_mem_alias_set (dest
, 0);
4089 emit_move_insn (dest
, x
);
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  int delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (delta == INT_MIN || delta == old_delta);
}
4117 /* Generate code to push X onto the stack, assuming it has mode MODE and
4119 MODE is redundant except when X is a CONST_INT (since they don't
4121 SIZE is an rtx for the size of data to be copied (in bytes),
4122 needed only if X is BLKmode.
4124 ALIGN (in bits) is maximum alignment we can assume.
4126 If PARTIAL and REG are both nonzero, then copy that many of the first
4127 bytes of X into registers starting with REG, and push the rest of X.
4128 The amount of space pushed is decreased by PARTIAL bytes.
4129 REG must be a hard register in this case.
4130 If REG is zero but PARTIAL is not, take all other actions for an
4131 argument partially in registers, but do not actually load any
4134 EXTRA is the amount in bytes of extra space to leave next to this arg.
4135 This is ignored if an argument block has already been allocated.
4137 On a machine that lacks real push insns, ARGS_ADDR is the address of
4138 the bottom of the argument block for this call. We use indexing off there
4139 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4140 argument block has not been preallocated.
4142 ARGS_SO_FAR is the size of args previously pushed for this call.
4144 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4145 for arguments passed in registers. If nonzero, it will be the number
4146 of bytes required. */
4149 emit_push_insn (rtx x
, machine_mode mode
, tree type
, rtx size
,
4150 unsigned int align
, int partial
, rtx reg
, int extra
,
4151 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
4155 enum direction stack_direction
4156 #ifdef STACK_GROWS_DOWNWARD
4162 /* Decide where to pad the argument: `downward' for below,
4163 `upward' for above, or `none' for don't pad it.
4164 Default is below for small data on big-endian machines; else above. */
4165 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
4167 /* Invert direction if stack is post-decrement.
4169 if (STACK_PUSH_CODE
== POST_DEC
)
4170 if (where_pad
!= none
)
4171 where_pad
= (where_pad
== downward
? upward
: downward
);
4176 || (STRICT_ALIGNMENT
&& align
< GET_MODE_ALIGNMENT (mode
)))
4178 /* Copy a block into the stack, entirely or partially. */
4185 offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
4186 used
= partial
- offset
;
4188 if (mode
!= BLKmode
)
4190 /* A value is to be stored in an insufficiently aligned
4191 stack slot; copy via a suitably aligned slot if
4193 size
= GEN_INT (GET_MODE_SIZE (mode
));
4194 if (!MEM_P (xinner
))
4196 temp
= assign_temp (type
, 1, 1);
4197 emit_move_insn (temp
, xinner
);
4204 /* USED is now the # of bytes we need not copy to the stack
4205 because registers will take care of them. */
4208 xinner
= adjust_address (xinner
, BLKmode
, used
);
4210 /* If the partial register-part of the arg counts in its stack size,
4211 skip the part of stack space corresponding to the registers.
4212 Otherwise, start copying to the beginning of the stack space,
4213 by setting SKIP to 0. */
4214 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
4216 #ifdef PUSH_ROUNDING
4217 /* Do it with several push insns if that doesn't take lots of insns
4218 and if there is no difficulty with push insns that skip bytes
4219 on the stack for alignment purposes. */
4222 && CONST_INT_P (size
)
4224 && MEM_ALIGN (xinner
) >= align
4225 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
4226 /* Here we avoid the case of a structure whose weak alignment
4227 forces many pushes of a small amount of data,
4228 and such small pushes do rounding that causes trouble. */
4229 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
4230 || align
>= BIGGEST_ALIGNMENT
4231 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
4232 == (align
/ BITS_PER_UNIT
)))
4233 && (HOST_WIDE_INT
) PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
4235 /* Push padding now if padding above and stack grows down,
4236 or if padding below and stack grows up.
4237 But if space already allocated, this has already been done. */
4238 if (extra
&& args_addr
== 0
4239 && where_pad
!= none
&& where_pad
!= stack_direction
)
4240 anti_adjust_stack (GEN_INT (extra
));
4242 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
4245 #endif /* PUSH_ROUNDING */
4249 /* Otherwise make space on the stack and copy the data
4250 to the address of that space. */
4252 /* Deduct words put into registers from the size we must copy. */
4255 if (CONST_INT_P (size
))
4256 size
= GEN_INT (INTVAL (size
) - used
);
4258 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
4259 gen_int_mode (used
, GET_MODE (size
)),
4260 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4263 /* Get the address of the stack space.
4264 In this case, we do not deal with EXTRA separately.
4265 A single stack adjust will do. */
4268 temp
= push_block (size
, extra
, where_pad
== downward
);
4271 else if (CONST_INT_P (args_so_far
))
4272 temp
= memory_address (BLKmode
,
4273 plus_constant (Pmode
, args_addr
,
4274 skip
+ INTVAL (args_so_far
)));
4276 temp
= memory_address (BLKmode
,
4277 plus_constant (Pmode
,
4278 gen_rtx_PLUS (Pmode
,
4283 if (!ACCUMULATE_OUTGOING_ARGS
)
4285 /* If the source is referenced relative to the stack pointer,
4286 copy it to another register to stabilize it. We do not need
4287 to do this if we know that we won't be changing sp. */
4289 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
4290 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
4291 temp
= copy_to_reg (temp
);
4294 target
= gen_rtx_MEM (BLKmode
, temp
);
4296 /* We do *not* set_mem_attributes here, because incoming arguments
4297 may overlap with sibling call outgoing arguments and we cannot
4298 allow reordering of reads from function arguments with stores
4299 to outgoing arguments of sibling calls. We do, however, want
4300 to record the alignment of the stack slot. */
4301 /* ALIGN may well be better aligned than TYPE, e.g. due to
4302 PARM_BOUNDARY. Assume the caller isn't lying. */
4303 set_mem_align (target
, align
);
4305 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
4308 else if (partial
> 0)
4310 /* Scalar partly in registers. */
4312 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
4315 /* # bytes of start of argument
4316 that we must make space for but need not store. */
4317 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
4318 int args_offset
= INTVAL (args_so_far
);
4321 /* Push padding now if padding above and stack grows down,
4322 or if padding below and stack grows up.
4323 But if space already allocated, this has already been done. */
4324 if (extra
&& args_addr
== 0
4325 && where_pad
!= none
&& where_pad
!= stack_direction
)
4326 anti_adjust_stack (GEN_INT (extra
));
4328 /* If we make space by pushing it, we might as well push
4329 the real data. Otherwise, we can leave OFFSET nonzero
4330 and leave the space uninitialized. */
4334 /* Now NOT_STACK gets the number of words that we don't need to
4335 allocate on the stack. Convert OFFSET to words too. */
4336 not_stack
= (partial
- offset
) / UNITS_PER_WORD
;
4337 offset
/= UNITS_PER_WORD
;
4339 /* If the partial register-part of the arg counts in its stack size,
4340 skip the part of stack space corresponding to the registers.
4341 Otherwise, start copying to the beginning of the stack space,
4342 by setting SKIP to 0. */
4343 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
4345 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
4346 x
= validize_mem (force_const_mem (mode
, x
));
4348 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4349 SUBREGs of such registers are not allowed. */
4350 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
4351 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
4352 x
= copy_to_reg (x
);
4354 /* Loop over all the words allocated on the stack for this arg. */
4355 /* We can do it by words, because any scalar bigger than a word
4356 has a size a multiple of a word. */
4357 for (i
= size
- 1; i
>= not_stack
; i
--)
4358 if (i
>= not_stack
+ offset
)
4359 emit_push_insn (operand_subword_force (x
, i
, mode
),
4360 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
4362 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
4364 reg_parm_stack_space
, alignment_pad
);
4371 /* Push padding now if padding above and stack grows down,
4372 or if padding below and stack grows up.
4373 But if space already allocated, this has already been done. */
4374 if (extra
&& args_addr
== 0
4375 && where_pad
!= none
&& where_pad
!= stack_direction
)
4376 anti_adjust_stack (GEN_INT (extra
));
4378 #ifdef PUSH_ROUNDING
4379 if (args_addr
== 0 && PUSH_ARGS
)
4380 emit_single_push_insn (mode
, x
, type
);
4384 if (CONST_INT_P (args_so_far
))
4386 = memory_address (mode
,
4387 plus_constant (Pmode
, args_addr
,
4388 INTVAL (args_so_far
)));
4390 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
4392 dest
= gen_rtx_MEM (mode
, addr
);
4394 /* We do *not* set_mem_attributes here, because incoming arguments
4395 may overlap with sibling call outgoing arguments and we cannot
4396 allow reordering of reads from function arguments with stores
4397 to outgoing arguments of sibling calls. We do, however, want
4398 to record the alignment of the stack slot. */
4399 /* ALIGN may well be better aligned than TYPE, e.g. due to
4400 PARM_BOUNDARY. Assume the caller isn't lying. */
4401 set_mem_align (dest
, align
);
4403 emit_move_insn (dest
, x
);
4407 /* If part should go in registers, copy that part
4408 into the appropriate registers. Do this now, at the end,
4409 since mem-to-mem copies above may do function calls. */
4410 if (partial
> 0 && reg
!= 0)
4412 /* Handle calls that pass values in multiple non-contiguous locations.
4413 The Irix 6 ABI has examples of this. */
4414 if (GET_CODE (reg
) == PARALLEL
)
4415 emit_group_load (reg
, x
, type
, -1);
4418 gcc_assert (partial
% UNITS_PER_WORD
== 0);
4419 move_block_to_reg (REGNO (reg
), x
, partial
/ UNITS_PER_WORD
, mode
);
4423 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
4424 anti_adjust_stack (GEN_INT (extra
));
4426 if (alignment_pad
&& args_addr
== 0)
4427 anti_adjust_stack (alignment_pad
);
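/* Illustrative sketch (hypothetical user code): passing a structure by value
   is the typical source of the BLKmode branch of emit_push_insn above.  The
   aggregate is either pushed piecewise (move_by_pieces) or space is made on
   the stack and the bytes are copied with emit_block_move, depending on the
   size, alignment and PUSH_ROUNDING constraints checked in that branch.

       struct big { char bytes[64]; };

       extern void consume (struct big);

       void
       forward (struct big *p)
       {
         consume (*p);   -- *p is pushed as a BLKmode argument
       }
*/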
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
4445 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4446 FIELD is a bitfield. Returns true if the optimization was successful,
4447 and there's nothing else to do. */
4450 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize
,
4451 unsigned HOST_WIDE_INT bitpos
,
4452 unsigned HOST_WIDE_INT bitregion_start
,
4453 unsigned HOST_WIDE_INT bitregion_end
,
4454 machine_mode mode1
, rtx str_rtx
,
4457 machine_mode str_mode
= GET_MODE (str_rtx
);
4458 unsigned int str_bitsize
= GET_MODE_BITSIZE (str_mode
);
4463 enum tree_code code
;
4465 if (mode1
!= VOIDmode
4466 || bitsize
>= BITS_PER_WORD
4467 || str_bitsize
> BITS_PER_WORD
4468 || TREE_SIDE_EFFECTS (to
)
4469 || TREE_THIS_VOLATILE (to
))
4473 if (TREE_CODE (src
) != SSA_NAME
)
4475 if (TREE_CODE (TREE_TYPE (src
)) != INTEGER_TYPE
)
4478 srcstmt
= get_gimple_for_ssa_name (src
);
4480 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt
)) != tcc_binary
)
4483 code
= gimple_assign_rhs_code (srcstmt
);
4485 op0
= gimple_assign_rhs1 (srcstmt
);
4487 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4488 to find its initialization. Hopefully the initialization will
4489 be from a bitfield load. */
4490 if (TREE_CODE (op0
) == SSA_NAME
)
4492 gimple op0stmt
= get_gimple_for_ssa_name (op0
);
4494 /* We want to eventually have OP0 be the same as TO, which
4495 should be a bitfield. */
4497 || !is_gimple_assign (op0stmt
)
4498 || gimple_assign_rhs_code (op0stmt
) != TREE_CODE (to
))
4500 op0
= gimple_assign_rhs1 (op0stmt
);
4503 op1
= gimple_assign_rhs2 (srcstmt
);
4505 if (!operand_equal_p (to
, op0
, 0))
4508 if (MEM_P (str_rtx
))
4510 unsigned HOST_WIDE_INT offset1
;
4512 if (str_bitsize
== 0 || str_bitsize
> BITS_PER_WORD
)
4513 str_mode
= word_mode
;
4514 str_mode
= get_best_mode (bitsize
, bitpos
,
4515 bitregion_start
, bitregion_end
,
4516 MEM_ALIGN (str_rtx
), str_mode
, 0);
4517 if (str_mode
== VOIDmode
)
4519 str_bitsize
= GET_MODE_BITSIZE (str_mode
);
4522 bitpos
%= str_bitsize
;
4523 offset1
= (offset1
- bitpos
) / BITS_PER_UNIT
;
4524 str_rtx
= adjust_address (str_rtx
, str_mode
, offset1
);
4526 else if (!REG_P (str_rtx
) && GET_CODE (str_rtx
) != SUBREG
)
4529 /* If the bit field covers the whole REG/MEM, store_field
4530 will likely generate better code. */
4531 if (bitsize
>= str_bitsize
)
4534 /* We can't handle fields split across multiple entities. */
4535 if (bitpos
+ bitsize
> str_bitsize
)
4538 if (BYTES_BIG_ENDIAN
)
4539 bitpos
= str_bitsize
- bitpos
- bitsize
;
4545 /* For now, just optimize the case of the topmost bitfield
4546 where we don't need to do any masking and also
4547 1 bit bitfields where xor can be used.
4548 We might win by one instruction for the other bitfields
4549 too if insv/extv instructions aren't used, so that
4550 can be added later. */
4551 if (bitpos
+ bitsize
!= str_bitsize
4552 && (bitsize
!= 1 || TREE_CODE (op1
) != INTEGER_CST
))
4555 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4556 value
= convert_modes (str_mode
,
4557 TYPE_MODE (TREE_TYPE (op1
)), value
,
4558 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4560 /* We may be accessing data outside the field, which means
4561 we can alias adjacent data. */
4562 if (MEM_P (str_rtx
))
4564 str_rtx
= shallow_copy_rtx (str_rtx
);
4565 set_mem_alias_set (str_rtx
, 0);
4566 set_mem_expr (str_rtx
, 0);
4569 binop
= code
== PLUS_EXPR
? add_optab
: sub_optab
;
4570 if (bitsize
== 1 && bitpos
+ bitsize
!= str_bitsize
)
4572 value
= expand_and (str_mode
, value
, const1_rtx
, NULL
);
4575 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
, bitpos
, NULL_RTX
, 1);
4576 result
= expand_binop (str_mode
, binop
, str_rtx
,
4577 value
, str_rtx
, 1, OPTAB_WIDEN
);
4578 if (result
!= str_rtx
)
4579 emit_move_insn (str_rtx
, result
);
4584 if (TREE_CODE (op1
) != INTEGER_CST
)
4586 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4587 value
= convert_modes (str_mode
,
4588 TYPE_MODE (TREE_TYPE (op1
)), value
,
4589 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4591 /* We may be accessing data outside the field, which means
4592 we can alias adjacent data. */
4593 if (MEM_P (str_rtx
))
4595 str_rtx
= shallow_copy_rtx (str_rtx
);
4596 set_mem_alias_set (str_rtx
, 0);
4597 set_mem_expr (str_rtx
, 0);
4600 binop
= code
== BIT_IOR_EXPR
? ior_optab
: xor_optab
;
4601 if (bitpos
+ bitsize
!= str_bitsize
)
4603 rtx mask
= gen_int_mode (((unsigned HOST_WIDE_INT
) 1 << bitsize
) - 1,
4605 value
= expand_and (str_mode
, value
, mask
, NULL_RTX
);
4607 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
, bitpos
, NULL_RTX
, 1);
4608 result
= expand_binop (str_mode
, binop
, str_rtx
,
4609 value
, str_rtx
, 1, OPTAB_WIDEN
);
4610 if (result
!= str_rtx
)
4611 emit_move_insn (str_rtx
, result
);
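/* Illustrative sketch (hypothetical user code): the cases handled by
   optimize_bitfield_assignment_op are read-modify-write updates of a
   bit-field, for example:

       struct flags
       {
         unsigned int ready : 1;
         unsigned int count : 7;
       };

       void
       mark_ready (struct flags *f)
       {
         f->ready |= 1;   -- single-bit field, ior/xor form
       }

       void
       bump (struct flags *f)
       {
         f->count += 1;   -- topmost field in its word, plus/minus form
       }

   Whether the shortcut actually applies depends on where the field falls
   within the underlying word, exactly as tested above.  */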
4621 /* In the C++ memory model, consecutive bit fields in a structure are
4622 considered one memory location.
4624 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4625 returns the bit range of consecutive bits in which this COMPONENT_REF
4626 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4627 and *OFFSET may be adjusted in the process.
4629 If the access does not need to be restricted, 0 is returned in both
4630 *BITSTART and *BITEND. */
4633 get_bit_range (unsigned HOST_WIDE_INT
*bitstart
,
4634 unsigned HOST_WIDE_INT
*bitend
,
4636 HOST_WIDE_INT
*bitpos
,
4639 HOST_WIDE_INT bitoffset
;
4642 gcc_assert (TREE_CODE (exp
) == COMPONENT_REF
);
4644 field
= TREE_OPERAND (exp
, 1);
4645 repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
4646 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4647 need to limit the range we can access. */
4650 *bitstart
= *bitend
= 0;
4654 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4655 part of a larger bit field, then the representative does not serve any
4656 useful purpose. This can occur in Ada. */
4657 if (handled_component_p (TREE_OPERAND (exp
, 0)))
4660 HOST_WIDE_INT rbitsize
, rbitpos
;
4664 get_inner_reference (TREE_OPERAND (exp
, 0), &rbitsize
, &rbitpos
,
4665 &roffset
, &rmode
, &unsignedp
, &volatilep
, false);
4666 if ((rbitpos
% BITS_PER_UNIT
) != 0)
4668 *bitstart
= *bitend
= 0;
4673 /* Compute the adjustment to bitpos from the offset of the field
4674 relative to the representative. DECL_FIELD_OFFSET of field and
4675 repr are the same by construction if they are not constants,
4676 see finish_bitfield_layout. */
4677 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field
))
4678 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr
)))
4679 bitoffset
= (tree_to_uhwi (DECL_FIELD_OFFSET (field
))
4680 - tree_to_uhwi (DECL_FIELD_OFFSET (repr
))) * BITS_PER_UNIT
;
4683 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
4684 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
4686 /* If the adjustment is larger than bitpos, we would have a negative bit
4687 position for the lower bound and this may wreak havoc later. Adjust
4688 offset and bitpos to make the lower bound non-negative in that case. */
4689 if (bitoffset
> *bitpos
)
4691 HOST_WIDE_INT adjust
= bitoffset
- *bitpos
;
4692 gcc_assert ((adjust
% BITS_PER_UNIT
) == 0);
4695 if (*offset
== NULL_TREE
)
4696 *offset
= size_int (-adjust
/ BITS_PER_UNIT
);
4699 = size_binop (MINUS_EXPR
, *offset
, size_int (adjust
/ BITS_PER_UNIT
));
4703 *bitstart
= *bitpos
- bitoffset
;
4705 *bitend
= *bitstart
+ tree_to_uhwi (DECL_SIZE (repr
)) - 1;
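/* Illustrative sketch (hypothetical user code): in the structure below the
   adjacent bit-fields A and B form a single memory location under the C++
   memory model, so a store to one of them may be implemented as a
   read-modify-write that also covers the other.  C is a separate location
   and must not be touched; the bit range computed by get_bit_range is what
   enforces that restriction.

       struct s
       {
         unsigned int a : 3;
         unsigned int b : 5;   -- same memory location as a
         char c;               -- separate location, outside the range
       };
*/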
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
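/* Illustrative sketch (hypothetical user code, details hedged): a MEM_REF
   whose base is the address of a local that never really lives in memory
   can, in some cases, survive to RTL expansion while the variable itself
   is assigned a register.  A type-punning access such as

       int
       low_word (long long x)
       {
         return *(int *) &x;   -- may appear as MEM[(int *) &x] in GIMPLE
       }

   is one source of such references.  The helpers above let the expanders
   treat the access as an operation (possibly a bit-field one) on the
   register rather than forcing the variable into a stack slot.  */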
4741 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4742 is true, try generating a nontemporal store. */
4745 expand_assignment (tree to
, tree from
, bool nontemporal
)
4751 enum insn_code icode
;
4753 /* Don't crash if the lhs of the assignment was erroneous. */
4754 if (TREE_CODE (to
) == ERROR_MARK
)
4756 expand_normal (from
);
4760 /* Optimize away no-op moves without side-effects. */
4761 if (operand_equal_p (to
, from
, 0))
4764 /* Handle misaligned stores. */
4765 mode
= TYPE_MODE (TREE_TYPE (to
));
4766 if ((TREE_CODE (to
) == MEM_REF
4767 || TREE_CODE (to
) == TARGET_MEM_REF
)
4769 && !mem_ref_refers_to_non_mem_p (to
)
4770 && ((align
= get_object_alignment (to
))
4771 < GET_MODE_ALIGNMENT (mode
))
4772 && (((icode
= optab_handler (movmisalign_optab
, mode
))
4773 != CODE_FOR_nothing
)
4774 || SLOW_UNALIGNED_ACCESS (mode
, align
)))
4778 reg
= expand_expr (from
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4779 reg
= force_not_mem (reg
);
4780 mem
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4782 if (icode
!= CODE_FOR_nothing
)
4784 struct expand_operand ops
[2];
4786 create_fixed_operand (&ops
[0], mem
);
4787 create_input_operand (&ops
[1], reg
, mode
);
4788 /* The movmisalign<mode> pattern cannot fail, else the assignment
4789 would silently be omitted. */
4790 expand_insn (icode
, 2, ops
);
4793 store_bit_field (mem
, GET_MODE_BITSIZE (mode
), 0, 0, 0, mode
, reg
);
4797 /* Assignment of a structure component needs special treatment
4798 if the structure component's rtx is not simply a MEM.
4799 Assignment of an array element at a constant index, and assignment of
4800 an array element in an unaligned packed structure field, has the same
4801 problem. Same for (partially) storing into a non-memory object. */
4802 if (handled_component_p (to
)
4803 || (TREE_CODE (to
) == MEM_REF
4804 && mem_ref_refers_to_non_mem_p (to
))
4805 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
4808 HOST_WIDE_INT bitsize
, bitpos
;
4809 unsigned HOST_WIDE_INT bitregion_start
= 0;
4810 unsigned HOST_WIDE_INT bitregion_end
= 0;
4817 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
4818 &unsignedp
, &volatilep
, true);
4820 /* Make sure bitpos is not negative, it can wreak havoc later. */
4823 gcc_assert (offset
== NULL_TREE
);
4824 offset
= size_int (bitpos
>> (BITS_PER_UNIT
== 8
4825 ? 3 : exact_log2 (BITS_PER_UNIT
)));
4826 bitpos
&= BITS_PER_UNIT
- 1;
4829 if (TREE_CODE (to
) == COMPONENT_REF
4830 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to
, 1)))
4831 get_bit_range (&bitregion_start
, &bitregion_end
, to
, &bitpos
, &offset
);
4832 /* The C++ memory model naturally applies to byte-aligned fields.
4833 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4834 BITSIZE are not byte-aligned, there is no need to limit the range
4835 we can access. This can occur with packed structures in Ada. */
4836 else if (bitsize
> 0
4837 && bitsize
% BITS_PER_UNIT
== 0
4838 && bitpos
% BITS_PER_UNIT
== 0)
4840 bitregion_start
= bitpos
;
4841 bitregion_end
= bitpos
+ bitsize
- 1;
4844 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4846 /* If the field has a mode, we want to access it in the
4847 field's mode, not the computed mode.
4848 If a MEM has VOIDmode (external with incomplete type),
4849 use BLKmode for it instead. */
4852 if (mode1
!= VOIDmode
)
4853 to_rtx
= adjust_address (to_rtx
, mode1
, 0);
4854 else if (GET_MODE (to_rtx
) == VOIDmode
)
4855 to_rtx
= adjust_address (to_rtx
, BLKmode
, 0);
4860 machine_mode address_mode
;
4863 if (!MEM_P (to_rtx
))
4865 /* We can get constant negative offsets into arrays with broken
4866 user code. Translate this to a trap instead of ICEing. */
4867 gcc_assert (TREE_CODE (offset
) == INTEGER_CST
);
4868 expand_builtin_trap ();
4869 to_rtx
= gen_rtx_MEM (BLKmode
, const0_rtx
);
4872 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4873 address_mode
= get_address_mode (to_rtx
);
4874 if (GET_MODE (offset_rtx
) != address_mode
)
4875 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
4877 /* If we have an expression in OFFSET_RTX and a non-zero
4878 byte offset in BITPOS, adding the byte offset before the
4879 OFFSET_RTX results in better intermediate code, which makes
4880 later rtl optimization passes perform better.
4882 We prefer intermediate code like this:
4884 r124:DI=r123:DI+0x18
4889 r124:DI=r123:DI+0x10
4890 [r124:DI+0x8]=r121:DI
4892 This is only done for aligned data values, as these can
4893 be expected to result in single move instructions. */
4894 if (mode1
!= VOIDmode
4897 && (bitpos
% bitsize
) == 0
4898 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4899 && MEM_ALIGN (to_rtx
) >= GET_MODE_ALIGNMENT (mode1
))
4901 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4902 bitregion_start
= 0;
4903 if (bitregion_end
>= (unsigned HOST_WIDE_INT
) bitpos
)
4904 bitregion_end
-= bitpos
;
4908 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4909 highest_pow2_factor_for_target (to
,
4913 /* No action is needed if the target is not a memory and the field
4914 lies completely outside that target. This can occur if the source
4915 code contains an out-of-bounds access to a small array. */
4917 && GET_MODE (to_rtx
) != BLKmode
4918 && (unsigned HOST_WIDE_INT
) bitpos
4919 >= GET_MODE_PRECISION (GET_MODE (to_rtx
)))
4921 expand_normal (from
);
4924 /* Handle expand_expr of a complex value returning a CONCAT. */
4925 else if (GET_CODE (to_rtx
) == CONCAT
)
4927 unsigned short mode_bitsize
= GET_MODE_BITSIZE (GET_MODE (to_rtx
));
4928 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from
)))
4930 && bitsize
== mode_bitsize
)
4931 result
= store_expr (from
, to_rtx
, false, nontemporal
);
4932 else if (bitsize
== mode_bitsize
/ 2
4933 && (bitpos
== 0 || bitpos
== mode_bitsize
/ 2))
4934 result
= store_expr (from
, XEXP (to_rtx
, bitpos
!= 0), false,
4936 else if (bitpos
+ bitsize
<= mode_bitsize
/ 2)
4937 result
= store_field (XEXP (to_rtx
, 0), bitsize
, bitpos
,
4938 bitregion_start
, bitregion_end
,
4940 get_alias_set (to
), nontemporal
);
4941 else if (bitpos
>= mode_bitsize
/ 2)
4942 result
= store_field (XEXP (to_rtx
, 1), bitsize
,
4943 bitpos
- mode_bitsize
/ 2,
4944 bitregion_start
, bitregion_end
,
4946 get_alias_set (to
), nontemporal
);
4947 else if (bitpos
== 0 && bitsize
== mode_bitsize
)
4950 result
= expand_normal (from
);
4951 from_rtx
= simplify_gen_subreg (GET_MODE (to_rtx
), result
,
4952 TYPE_MODE (TREE_TYPE (from
)), 0);
4953 emit_move_insn (XEXP (to_rtx
, 0),
4954 read_complex_part (from_rtx
, false));
4955 emit_move_insn (XEXP (to_rtx
, 1),
4956 read_complex_part (from_rtx
, true));
4960 rtx temp
= assign_stack_temp (GET_MODE (to_rtx
),
4961 GET_MODE_SIZE (GET_MODE (to_rtx
)));
4962 write_complex_part (temp
, XEXP (to_rtx
, 0), false);
4963 write_complex_part (temp
, XEXP (to_rtx
, 1), true);
4964 result
= store_field (temp
, bitsize
, bitpos
,
4965 bitregion_start
, bitregion_end
,
4967 get_alias_set (to
), nontemporal
);
4968 emit_move_insn (XEXP (to_rtx
, 0), read_complex_part (temp
, false));
4969 emit_move_insn (XEXP (to_rtx
, 1), read_complex_part (temp
, true));
4976 /* If the field is at offset zero, we could have been given the
4977 DECL_RTX of the parent struct. Don't munge it. */
4978 to_rtx
= shallow_copy_rtx (to_rtx
);
4979 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
4981 MEM_VOLATILE_P (to_rtx
) = 1;
4984 if (optimize_bitfield_assignment_op (bitsize
, bitpos
,
4985 bitregion_start
, bitregion_end
,
4990 result
= store_field (to_rtx
, bitsize
, bitpos
,
4991 bitregion_start
, bitregion_end
,
4993 get_alias_set (to
), nontemporal
);
4997 preserve_temp_slots (result
);
5002 /* If the rhs is a function call and its value is not an aggregate,
5003 call the function before we start to compute the lhs.
5004 This is needed for correct code for cases such as
5005 val = setjmp (buf) on machines where reference to val
5006 requires loading up part of an address in a separate insn.
5008 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5009 since it might be a promoted variable where the zero- or sign- extension
5010 needs to be done. Handling this in the normal way is safe because no
5011 computation is done before the call. The same is true for SSA names. */
5012 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
5013 && COMPLETE_TYPE_P (TREE_TYPE (from
))
5014 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
5015 && ! (((TREE_CODE (to
) == VAR_DECL
5016 || TREE_CODE (to
) == PARM_DECL
5017 || TREE_CODE (to
) == RESULT_DECL
)
5018 && REG_P (DECL_RTL (to
)))
5019 || TREE_CODE (to
) == SSA_NAME
))
5024 value
= expand_normal (from
);
5026 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5028 /* Handle calls that return values in multiple non-contiguous locations.
5029 The Irix 6 ABI has examples of this. */
5030 if (GET_CODE (to_rtx
) == PARALLEL
)
5032 if (GET_CODE (value
) == PARALLEL
)
5033 emit_group_move (to_rtx
, value
);
5035 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
5036 int_size_in_bytes (TREE_TYPE (from
)));
5038 else if (GET_CODE (value
) == PARALLEL
)
5039 emit_group_store (to_rtx
, value
, TREE_TYPE (from
),
5040 int_size_in_bytes (TREE_TYPE (from
)));
5041 else if (GET_MODE (to_rtx
) == BLKmode
)
5043 /* Handle calls that return BLKmode values in registers. */
5045 copy_blkmode_from_reg (to_rtx
, value
, TREE_TYPE (from
));
5047 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
5051 if (POINTER_TYPE_P (TREE_TYPE (to
)))
5052 value
= convert_memory_address_addr_space
5053 (GET_MODE (to_rtx
), value
,
5054 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to
))));
5056 emit_move_insn (to_rtx
, value
);
5058 preserve_temp_slots (to_rtx
);
5063 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5064 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5066 /* Don't move directly into a return register. */
5067 if (TREE_CODE (to
) == RESULT_DECL
5068 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
5074 /* If the source is itself a return value, it still is in a pseudo at
5075 this point so we can move it back to the return register directly. */
5077 && TYPE_MODE (TREE_TYPE (from
)) == BLKmode
5078 && TREE_CODE (from
) != CALL_EXPR
)
5079 temp
= copy_blkmode_to_reg (GET_MODE (to_rtx
), from
);
5081 temp
= expand_expr (from
, NULL_RTX
, GET_MODE (to_rtx
), EXPAND_NORMAL
);
5083 /* Handle calls that return values in multiple non-contiguous locations.
5084 The Irix 6 ABI has examples of this. */
5085 if (GET_CODE (to_rtx
) == PARALLEL
)
5087 if (GET_CODE (temp
) == PARALLEL
)
5088 emit_group_move (to_rtx
, temp
);
5090 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
5091 int_size_in_bytes (TREE_TYPE (from
)));
5094 emit_move_insn (to_rtx
, temp
);
5096 preserve_temp_slots (to_rtx
);
5101 /* In case we are returning the contents of an object which overlaps
5102 the place the value is being stored, use a safe function when copying
5103 a value through a pointer into a structure value return block. */
5104 if (TREE_CODE (to
) == RESULT_DECL
5105 && TREE_CODE (from
) == INDIRECT_REF
5106 && ADDR_SPACE_GENERIC_P
5107 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from
, 0)))))
5108 && refs_may_alias_p (to
, from
)
5109 && cfun
->returns_struct
5110 && !cfun
->returns_pcc_struct
)
5115 size
= expr_size (from
);
5116 from_rtx
= expand_normal (from
);
5118 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
5119 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
5120 XEXP (from_rtx
, 0), Pmode
,
5121 convert_to_mode (TYPE_MODE (sizetype
),
5122 size
, TYPE_UNSIGNED (sizetype
)),
5123 TYPE_MODE (sizetype
));
5125 preserve_temp_slots (to_rtx
);
5130 /* Compute FROM and store the value in the rtx we got. */
5133 result
= store_expr (from
, to_rtx
, 0, nontemporal
);
5134 preserve_temp_slots (result
);
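/* Illustrative sketch (hypothetical user code): stores whose effective
   alignment is smaller than the natural alignment of the mode -- for
   instance members of packed structures on a strict-alignment target --
   are the kind of assignment that goes through the misaligned-store
   handling above (movmisalign optab or store_bit_field) rather than a
   plain move.

       struct __attribute__ ((packed)) rec
       {
         char tag;
         int value;   -- only byte-aligned
       };

       void
       set_value (struct rec *r, int v)
       {
         r->value = v;   -- cannot be a simple aligned SImode store
       }
*/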
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
5157 /* Generate code for computing expression EXP,
5158 and storing the value into TARGET.
5160 If the mode is BLKmode then we may return TARGET itself.
5161 It turns out that in BLKmode it doesn't cause a problem,
5162 because C has no operators that could combine two different
5163 assignments into the same BLKmode object with different values
5164 with no sequence point. Will other languages need this to
5167 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5168 stack, and block moves may need to be treated specially.
5170 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5173 store_expr (tree exp
, rtx target
, int call_param_p
, bool nontemporal
)
5176 rtx alt_rtl
= NULL_RTX
;
5177 location_t loc
= curr_insn_location ();
5179 if (VOID_TYPE_P (TREE_TYPE (exp
)))
5181 /* C++ can generate ?: expressions with a throw expression in one
5182 branch and an rvalue in the other. Here, we resolve attempts to
5183 store the throw expression's nonexistent result. */
5184 gcc_assert (!call_param_p
);
5185 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5188 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
5190 /* Perform first part of compound expression, then assign from second
5192 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
5193 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5194 return store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
,
5197 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
5199 /* For conditional expression, get safe form of the target. Then
5200 test the condition, doing the appropriate assignment on either
5201 side. This avoids the creation of unnecessary temporaries.
5202 For non-BLKmode, it is more efficient not to do this. */
5204 rtx_code_label
*lab1
= gen_label_rtx (), *lab2
= gen_label_rtx ();
5206 do_pending_stack_adjust ();
5208 jumpifnot (TREE_OPERAND (exp
, 0), lab1
, -1);
5209 store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
,
5211 emit_jump_insn (gen_jump (lab2
));
5214 store_expr (TREE_OPERAND (exp
, 2), target
, call_param_p
,
5221 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
5222 /* If this is a scalar in a register that is stored in a wider mode
5223 than the declared mode, compute the result into its declared mode
5224 and then convert to the wider mode. Our value is the computed
5227 rtx inner_target
= 0;
5229 /* We can do the conversion inside EXP, which will often result
5230 in some optimizations. Do the conversion in two steps: first
5231 change the signedness, if needed, then the extend. But don't
5232 do this if the type of EXP is a subtype of something else
5233 since then the conversion might involve more than just
5234 converting modes. */
5235 if (INTEGRAL_TYPE_P (TREE_TYPE (exp
))
5236 && TREE_TYPE (TREE_TYPE (exp
)) == 0
5237 && GET_MODE_PRECISION (GET_MODE (target
))
5238 == TYPE_PRECISION (TREE_TYPE (exp
)))
5240 if (!SUBREG_CHECK_PROMOTED_SIGN (target
,
5241 TYPE_UNSIGNED (TREE_TYPE (exp
))))
5243 /* Some types, e.g. Fortran's logical*4, won't have a signed
5244 version, so use the mode instead. */
5246 = (signed_or_unsigned_type_for
5247 (SUBREG_PROMOTED_SIGN (target
), TREE_TYPE (exp
)));
5249 ntype
= lang_hooks
.types
.type_for_mode
5250 (TYPE_MODE (TREE_TYPE (exp
)),
5251 SUBREG_PROMOTED_SIGN (target
));
5253 exp
= fold_convert_loc (loc
, ntype
, exp
);
5256 exp
= fold_convert_loc (loc
, lang_hooks
.types
.type_for_mode
5257 (GET_MODE (SUBREG_REG (target
)),
5258 SUBREG_PROMOTED_SIGN (target
)),
5261 inner_target
= SUBREG_REG (target
);
5264 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
5265 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5267 /* If TEMP is a VOIDmode constant, use convert_modes to make
5268 sure that we properly convert it. */
5269 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
5271 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5272 temp
, SUBREG_PROMOTED_SIGN (target
));
5273 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
5274 GET_MODE (target
), temp
,
5275 SUBREG_PROMOTED_SIGN (target
));
5278 convert_move (SUBREG_REG (target
), temp
,
5279 SUBREG_PROMOTED_SIGN (target
));
5283 else if ((TREE_CODE (exp
) == STRING_CST
5284 || (TREE_CODE (exp
) == MEM_REF
5285 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5286 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5288 && integer_zerop (TREE_OPERAND (exp
, 1))))
5289 && !nontemporal
&& !call_param_p
5292 /* Optimize initialization of an array with a STRING_CST. */
5293 HOST_WIDE_INT exp_len
, str_copy_len
;
5295 tree str
= TREE_CODE (exp
) == STRING_CST
5296 ? exp
: TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5298 exp_len
= int_expr_size (exp
);
5302 if (TREE_STRING_LENGTH (str
) <= 0)
5305 str_copy_len
= strlen (TREE_STRING_POINTER (str
));
5306 if (str_copy_len
< TREE_STRING_LENGTH (str
) - 1)
5309 str_copy_len
= TREE_STRING_LENGTH (str
);
5310 if ((STORE_MAX_PIECES
& (STORE_MAX_PIECES
- 1)) == 0
5311 && TREE_STRING_POINTER (str
)[TREE_STRING_LENGTH (str
) - 1] == '\0')
5313 str_copy_len
+= STORE_MAX_PIECES
- 1;
5314 str_copy_len
&= ~(STORE_MAX_PIECES
- 1);
5316 str_copy_len
= MIN (str_copy_len
, exp_len
);
5317 if (!can_store_by_pieces (str_copy_len
, builtin_strncpy_read_str
,
5318 CONST_CAST (char *, TREE_STRING_POINTER (str
)),
5319 MEM_ALIGN (target
), false))
5324 dest_mem
= store_by_pieces (dest_mem
,
5325 str_copy_len
, builtin_strncpy_read_str
,
5327 TREE_STRING_POINTER (str
)),
5328 MEM_ALIGN (target
), false,
5329 exp_len
> str_copy_len
? 1 : 0);
5330 if (exp_len
> str_copy_len
)
5331 clear_storage (adjust_address (dest_mem
, BLKmode
, 0),
5332 GEN_INT (exp_len
- str_copy_len
),
5341 /* If we want to use a nontemporal store, force the value to
5343 tmp_target
= nontemporal
? NULL_RTX
: target
;
5344 temp
= expand_expr_real (exp
, tmp_target
, GET_MODE (target
),
5346 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
5350 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5351 the same as that of TARGET, adjust the constant. This is needed, for
5352 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5353 only a word-sized value. */
5354 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
5355 && TREE_CODE (exp
) != ERROR_MARK
5356 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
5357 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5358 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5360 /* If value was not generated in the target, store it there.
5361 Convert the value to TARGET's type first if necessary and emit the
5362 pending incrementations that have been queued when expanding EXP.
5363 Note that we cannot emit the whole queue blindly because this will
5364 effectively disable the POST_INC optimization later.
5366 If TEMP and TARGET compare equal according to rtx_equal_p, but
5367 one or both of them are volatile memory refs, we have to distinguish
5369 - expand_expr has used TARGET. In this case, we must not generate
5370 another copy. This can be detected by TARGET being equal according
5372 - expand_expr has not used TARGET - that means that the source just
5373 happens to have the same RTX form. Since temp will have been created
5374 by expand_expr, it will compare unequal according to == .
5375 We must generate a copy in this case, to reach the correct number
5376 of volatile memory references. */
5378 if ((! rtx_equal_p (temp
, target
)
5379 || (temp
!= target
&& (side_effects_p (temp
)
5380 || side_effects_p (target
))))
5381 && TREE_CODE (exp
) != ERROR_MARK
5382 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5383 but TARGET is not valid memory reference, TEMP will differ
5384 from TARGET although it is really the same location. */
5386 && rtx_equal_p (alt_rtl
, target
)
5387 && !side_effects_p (alt_rtl
)
5388 && !side_effects_p (target
))
5389 /* If there's nothing to copy, don't bother. Don't call
5390 expr_size unless necessary, because some front-ends (C++)
5391 expr_size-hook must not be given objects that are not
5392 supposed to be bit-copied or bit-initialized. */
5393 && expr_size (exp
) != const0_rtx
)
5395 if (GET_MODE (temp
) != GET_MODE (target
) && GET_MODE (temp
) != VOIDmode
)
5397 if (GET_MODE (target
) == BLKmode
)
5399 /* Handle calls that return BLKmode values in registers. */
5400 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
5401 copy_blkmode_from_reg (target
, temp
, TREE_TYPE (exp
));
5403 store_bit_field (target
,
5404 INTVAL (expr_size (exp
)) * BITS_PER_UNIT
,
5405 0, 0, 0, GET_MODE (temp
), temp
);
5408 convert_move (target
, temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5411 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
5413 /* Handle copying a string constant into an array. The string
5414 constant may be shorter than the array. So copy just the string's
5415 actual length, and clear the rest. First get the size of the data
5416 type of the string, which is actually the size of the target. */
5417 rtx size
= expr_size (exp
);
5419 if (CONST_INT_P (size
)
5420 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
5421 emit_block_move (target
, temp
, size
,
5423 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5426 machine_mode pointer_mode
5427 = targetm
.addr_space
.pointer_mode (MEM_ADDR_SPACE (target
));
5428 machine_mode address_mode
= get_address_mode (target
);
5430 /* Compute the size of the data to copy from the string. */
5432 = size_binop_loc (loc
, MIN_EXPR
,
5433 make_tree (sizetype
, size
),
5434 size_int (TREE_STRING_LENGTH (exp
)));
5436 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
5438 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
5439 rtx_code_label
*label
= 0;
5441 /* Copy that much. */
5442 copy_size_rtx
= convert_to_mode (pointer_mode
, copy_size_rtx
,
5443 TYPE_UNSIGNED (sizetype
));
5444 emit_block_move (target
, temp
, copy_size_rtx
,
5446 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5448 /* Figure out how much is left in TARGET that we have to clear.
5449 Do all calculations in pointer_mode. */
5450 if (CONST_INT_P (copy_size_rtx
))
5452 size
= plus_constant (address_mode
, size
,
5453 -INTVAL (copy_size_rtx
));
5454 target
= adjust_address (target
, BLKmode
,
5455 INTVAL (copy_size_rtx
));
5459 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
5460 copy_size_rtx
, NULL_RTX
, 0,
5463 if (GET_MODE (copy_size_rtx
) != address_mode
)
5464 copy_size_rtx
= convert_to_mode (address_mode
,
5466 TYPE_UNSIGNED (sizetype
));
5468 target
= offset_address (target
, copy_size_rtx
,
5469 highest_pow2_factor (copy_size
));
5470 label
= gen_label_rtx ();
5471 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
5472 GET_MODE (size
), 0, label
);
5475 if (size
!= const0_rtx
)
5476 clear_storage (target
, size
, BLOCK_OP_NORMAL
);
5482 /* Handle calls that return values in multiple non-contiguous locations.
5483 The Irix 6 ABI has examples of this. */
5484 else if (GET_CODE (target
) == PARALLEL
)
5486 if (GET_CODE (temp
) == PARALLEL
)
5487 emit_group_move (target
, temp
);
5489 emit_group_load (target
, temp
, TREE_TYPE (exp
),
5490 int_size_in_bytes (TREE_TYPE (exp
)));
5492 else if (GET_CODE (temp
) == PARALLEL
)
5493 emit_group_store (target
, temp
, TREE_TYPE (exp
),
5494 int_size_in_bytes (TREE_TYPE (exp
)));
5495 else if (GET_MODE (temp
) == BLKmode
)
5496 emit_block_move (target
, temp
, expr_size (exp
),
5498 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5499 /* If we emit a nontemporal store, there is nothing else to do. */
5500 else if (nontemporal
&& emit_storent_insn (target
, temp
))
5504 temp
= force_operand (temp
, target
);
5506 emit_move_insn (target
, temp
);
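/* Illustrative sketch (hypothetical user code): initializing a char array
   from a string constant that is shorter than the array exercises the
   STRING_CST handling in store_expr; the string bytes are copied (possibly
   with store_by_pieces) and the remainder of the array is cleared.

       void
       init (void)
       {
         char buf[16] = "hi";   -- copy 3 bytes, clear the other 13
       }
*/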
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
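/* Illustrative sketch (hypothetical user code): DATA below is a flexible
   array member -- last field, incomplete array type, zero lower bound and
   no upper bound -- so flexible_array_member_p returns true for it and
   count_type_elements does not expect an initializer for it.

       struct message
       {
         int length;
         char data[];
       };
*/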
5530 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5531 must have in order for it to completely initialize a value of type TYPE.
5532 Return -1 if the number isn't known.
5534 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5536 static HOST_WIDE_INT
5537 count_type_elements (const_tree type
, bool for_ctor_p
)
5539 switch (TREE_CODE (type
))
5545 nelts
= array_type_nelts (type
);
5546 if (nelts
&& tree_fits_uhwi_p (nelts
))
5548 unsigned HOST_WIDE_INT n
;
5550 n
= tree_to_uhwi (nelts
) + 1;
5551 if (n
== 0 || for_ctor_p
)
5554 return n
* count_type_elements (TREE_TYPE (type
), false);
5556 return for_ctor_p
? -1 : 1;
5561 unsigned HOST_WIDE_INT n
;
5565 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5566 if (TREE_CODE (f
) == FIELD_DECL
)
5569 n
+= count_type_elements (TREE_TYPE (f
), false);
5570 else if (!flexible_array_member_p (f
, type
))
5571 /* Don't count flexible arrays, which are not supposed
5572 to be initialized. */
5580 case QUAL_UNION_TYPE
:
5585 gcc_assert (!for_ctor_p
);
5586 /* Estimate the number of scalars in each field and pick the
5587 maximum. Other estimates would do instead; the idea is simply
5588 to make sure that the estimate is not sensitive to the ordering
5591 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5592 if (TREE_CODE (f
) == FIELD_DECL
)
5594 m
= count_type_elements (TREE_TYPE (f
), false);
5595 /* If the field doesn't span the whole union, add an extra
5596 scalar for the rest. */
5597 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f
)),
5598 TYPE_SIZE (type
)) != 1)
5610 return TYPE_VECTOR_SUBPARTS (type
);
5614 case FIXED_POINT_TYPE
:
5619 case REFERENCE_TYPE
:
5635 /* Helper for categorize_ctor_elements. Identical interface. */
5638 categorize_ctor_elements_1 (const_tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
5639 HOST_WIDE_INT
*p_init_elts
, bool *p_complete
)
5641 unsigned HOST_WIDE_INT idx
;
5642 HOST_WIDE_INT nz_elts
, init_elts
, num_fields
;
5643 tree value
, purpose
, elt_type
;
5645 /* Whether CTOR is a valid constant initializer, in accordance with what
5646 initializer_constant_valid_p does. If inferred from the constructor
5647 elements, true until proven otherwise. */
5648 bool const_from_elts_p
= constructor_static_from_elts_p (ctor
);
5649 bool const_p
= const_from_elts_p
? true : TREE_STATIC (ctor
);
5654 elt_type
= NULL_TREE
;
5656 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), idx
, purpose
, value
)
5658 HOST_WIDE_INT mult
= 1;
5660 if (purpose
&& TREE_CODE (purpose
) == RANGE_EXPR
)
5662 tree lo_index
= TREE_OPERAND (purpose
, 0);
5663 tree hi_index
= TREE_OPERAND (purpose
, 1);
5665 if (tree_fits_uhwi_p (lo_index
) && tree_fits_uhwi_p (hi_index
))
5666 mult
= (tree_to_uhwi (hi_index
)
5667 - tree_to_uhwi (lo_index
) + 1);
5670 elt_type
= TREE_TYPE (value
);
5672 switch (TREE_CODE (value
))
5676 HOST_WIDE_INT nz
= 0, ic
= 0;
5678 bool const_elt_p
= categorize_ctor_elements_1 (value
, &nz
, &ic
,
5681 nz_elts
+= mult
* nz
;
5682 init_elts
+= mult
* ic
;
5684 if (const_from_elts_p
&& const_p
)
5685 const_p
= const_elt_p
;
5692 if (!initializer_zerop (value
))
5698 nz_elts
+= mult
* TREE_STRING_LENGTH (value
);
5699 init_elts
+= mult
* TREE_STRING_LENGTH (value
);
5703 if (!initializer_zerop (TREE_REALPART (value
)))
5705 if (!initializer_zerop (TREE_IMAGPART (value
)))
5713 for (i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
5715 tree v
= VECTOR_CST_ELT (value
, i
);
5716 if (!initializer_zerop (v
))
5725 HOST_WIDE_INT tc
= count_type_elements (elt_type
, false);
5726 nz_elts
+= mult
* tc
;
5727 init_elts
+= mult
* tc
;
5729 if (const_from_elts_p
&& const_p
)
5730 const_p
= initializer_constant_valid_p (value
, elt_type
)
5737 if (*p_complete
&& !complete_ctor_at_level_p (TREE_TYPE (ctor
),
5738 num_fields
, elt_type
))
5739 *p_complete
= false;
5741 *p_nz_elts
+= nz_elts
;
5742 *p_init_elts
+= init_elts
;
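/* Illustrative sketch (hypothetical user code): for the initializer below,
   categorize_ctor_elements_1 would report three initialized scalar fields
   (INIT_ELTS), of which two are nonzero (NZ_ELTS), and the constructor is
   complete because every field of the struct is covered.

       struct point3 { int x, y, z; };

       struct point3 origin_ish = { 1, 0, 2 };
*/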
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}

/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
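/* Illustrative sketch (hypothetical user code): for unions, completeness is
   judged by comparing the size of the single initialized member against the
   size of the union itself, so the second initializer below would not be
   considered complete (the char member does not cover all of the union's
   bytes).

       union u { int i; char c; };

       union u full = { .i = 5 };        -- complete
       union u partial = { .c = 'x' };   -- not complete
*/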
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}

/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
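/* Illustrative sketch (hypothetical user code): an aggregate initializer
   dominated by zeros, such as

       int table[100] = { [5] = 7 };

   is flagged by mostly_zeros_p, so store_constructor typically clears the
   whole object first (clear_storage) and then stores only the nonzero
   element, instead of emitting one store per array element.  */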
5832 /* Helper function for store_constructor.
5833 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5834 CLEARED is as for store_constructor.
5835 ALIAS_SET is the alias set to use for any stores.
5837 This provides a recursive shortcut back to store_constructor when it isn't
5838 necessary to go through store_field. This is so that we can pass through
5839 the cleared field to let store_constructor know that we may not have to
5840 clear a substructure if the outer structure has already been cleared. */
5843 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
5844 HOST_WIDE_INT bitpos
, machine_mode mode
,
5845 tree exp
, int cleared
, alias_set_type alias_set
)
5847 if (TREE_CODE (exp
) == CONSTRUCTOR
5848 /* We can only call store_constructor recursively if the size and
5849 bit position are on a byte boundary. */
5850 && bitpos
% BITS_PER_UNIT
== 0
5851 && (bitsize
> 0 && bitsize
% BITS_PER_UNIT
== 0)
5852 /* If we have a nonzero bitpos for a register target, then we just
5853 let store_field do the bitfield handling. This is unlikely to
5854 generate unnecessary clear instructions anyways. */
5855 && (bitpos
== 0 || MEM_P (target
)))
5859 = adjust_address (target
,
5860 GET_MODE (target
) == BLKmode
5862 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
5863 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5866 /* Update the alias set, if required. */
5867 if (MEM_P (target
) && ! MEM_KEEP_ALIAS_SET_P (target
)
5868 && MEM_ALIAS_SET (target
) != 0)
5870 target
= copy_rtx (target
);
5871 set_mem_alias_set (target
, alias_set
);
5874 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
5877 store_field (target
, bitsize
, bitpos
, 0, 0, mode
, exp
, alias_set
, false);
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
5897 /* Store the value of constructor EXP into the rtx TARGET.
5898 TARGET is either a REG or a MEM; we know it cannot conflict, since
5899 safe_from_p has been called.
5900 CLEARED is true if TARGET is known to have been zero'd.
5901 SIZE is the number of bytes of TARGET we are allowed to modify: this
5902 may not be the same as the size of EXP if we are assigning to a field
5903 which has been packed to exclude padding bits. */
5906 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
5908 tree type
= TREE_TYPE (exp
);
5909 #ifdef WORD_REGISTER_OPERATIONS
5910 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
5913 switch (TREE_CODE (type
))
5917 case QUAL_UNION_TYPE
:
5919 unsigned HOST_WIDE_INT idx
;
5922 /* If size is zero or the target is already cleared, do nothing. */
5923 if (size
== 0 || cleared
)
5925 /* We either clear the aggregate or indicate the value is dead. */
5926 else if ((TREE_CODE (type
) == UNION_TYPE
5927 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
5928 && ! CONSTRUCTOR_ELTS (exp
))
5929 /* If the constructor is empty, clear the union. */
5931 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
5935 /* If we are building a static constructor into a register,
5936 set the initial value as zero so we can fold the value into
5937 a constant. But if more than one register is involved,
5938 this probably loses. */
5939 else if (REG_P (target
) && TREE_STATIC (exp
)
5940 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
5942 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5946 /* If the constructor has fewer fields than the structure or
5947 if we are initializing the structure to mostly zeros, clear
5948 the whole structure first. Don't do this if TARGET is a
5949 register whose mode size isn't equal to SIZE since
5950 clear_storage can't handle this case. */
5952 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp
))
5953 != fields_length (type
))
5954 || mostly_zeros_p (exp
))
5956 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
5959 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
5963 if (REG_P (target
) && !cleared
)
5964 emit_clobber (target
);
5966 /* Store each element of the constructor into the
5967 corresponding field of TARGET. */
5968 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, field
, value
)
5971 HOST_WIDE_INT bitsize
;
5972 HOST_WIDE_INT bitpos
= 0;
5974 rtx to_rtx
= target
;
5976 /* Just ignore missing fields. We cleared the whole
5977 structure, above, if any fields are missing. */
5981 if (cleared
&& initializer_zerop (value
))
5984 if (tree_fits_uhwi_p (DECL_SIZE (field
)))
5985 bitsize
= tree_to_uhwi (DECL_SIZE (field
));
5989 mode
= DECL_MODE (field
);
5990 if (DECL_BIT_FIELD (field
))
5993 offset
= DECL_FIELD_OFFSET (field
);
5994 if (tree_fits_shwi_p (offset
)
5995 && tree_fits_shwi_p (bit_position (field
)))
5997 bitpos
= int_bit_position (field
);
6001 bitpos
= tree_to_shwi (DECL_FIELD_BIT_OFFSET (field
));
6005 machine_mode address_mode
;
6009 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
6010 make_tree (TREE_TYPE (exp
),
6013 offset_rtx
= expand_normal (offset
);
6014 gcc_assert (MEM_P (to_rtx
));
6016 address_mode
= get_address_mode (to_rtx
);
6017 if (GET_MODE (offset_rtx
) != address_mode
)
6018 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
6020 to_rtx
= offset_address (to_rtx
, offset_rtx
,
6021 highest_pow2_factor (offset
));
6024 #ifdef WORD_REGISTER_OPERATIONS
6025 /* If this initializes a field that is smaller than a
6026 word, at the start of a word, try to widen it to a full
6027 word. This special case allows us to output C++ member
6028 function initializations in a form that the optimizers
6031 && bitsize
< BITS_PER_WORD
6032 && bitpos
% BITS_PER_WORD
== 0
6033 && GET_MODE_CLASS (mode
) == MODE_INT
6034 && TREE_CODE (value
) == INTEGER_CST
6036 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
6038 tree type
= TREE_TYPE (value
);
6040 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
6042 type
= lang_hooks
.types
.type_for_mode
6043 (word_mode
, TYPE_UNSIGNED (type
));
6044 value
= fold_convert (type
, value
);
6047 if (BYTES_BIG_ENDIAN
)
6049 = fold_build2 (LSHIFT_EXPR
, type
, value
,
6050 build_int_cst (type
,
6051 BITS_PER_WORD
- bitsize
));
6052 bitsize
= BITS_PER_WORD
;
6057 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
6058 && DECL_NONADDRESSABLE_P (field
))
6060 to_rtx
= copy_rtx (to_rtx
);
6061 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
6064 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
6066 get_alias_set (TREE_TYPE (field
)));
6073 unsigned HOST_WIDE_INT i
;
6076 tree elttype
= TREE_TYPE (type
);
6078 HOST_WIDE_INT minelt
= 0;
6079 HOST_WIDE_INT maxelt
= 0;
6081 domain
= TYPE_DOMAIN (type
);
6082 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
6083 && TYPE_MAX_VALUE (domain
)
6084 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain
))
6085 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain
)));
6087 /* If we have constant bounds for the range of the type, get them. */
6090 minelt
= tree_to_shwi (TYPE_MIN_VALUE (domain
));
6091 maxelt
= tree_to_shwi (TYPE_MAX_VALUE (domain
));
6094 /* If the constructor has fewer elements than the array, clear
6095 the whole array first. Similarly if this is static
6096 constructor of a non-BLKmode object. */
6099 else if (REG_P (target
) && TREE_STATIC (exp
))
6103 unsigned HOST_WIDE_INT idx
;
6105 HOST_WIDE_INT count
= 0, zero_count
= 0;
6106 need_to_clear
= ! const_bounds_p
;
6108 /* This loop is a more accurate version of the loop in
6109 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6110 is also needed to check for missing elements. */
6111 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, index
, value
)
6113 HOST_WIDE_INT this_node_count
;
6118 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6120 tree lo_index
= TREE_OPERAND (index
, 0);
6121 tree hi_index
= TREE_OPERAND (index
, 1);
6123 if (! tree_fits_uhwi_p (lo_index
)
6124 || ! tree_fits_uhwi_p (hi_index
))
6130 this_node_count
= (tree_to_uhwi (hi_index
)
6131 - tree_to_uhwi (lo_index
) + 1);
6134 this_node_count
= 1;
6136 count
+= this_node_count
;
6137 if (mostly_zeros_p (value
))
6138 zero_count
+= this_node_count
;
6141 /* Clear the entire array first if there are any missing
6142 elements, or if the incidence of zero elements is >=
6145 && (count
< maxelt
- minelt
+ 1
6146 || 4 * zero_count
>= 3 * count
))
6150 if (need_to_clear
&& size
> 0)
6153 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6155 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6159 if (!cleared
&& REG_P (target
))
6160 /* Inform later passes that the old value is dead. */
6161 emit_clobber (target
);
	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
6166 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), i
, index
, value
)
6169 HOST_WIDE_INT bitsize
;
6170 HOST_WIDE_INT bitpos
;
6171 rtx xtarget
= target
;
6173 if (cleared
&& initializer_zerop (value
))
6176 mode
= TYPE_MODE (elttype
);
6177 if (mode
== BLKmode
)
6178 bitsize
= (tree_fits_uhwi_p (TYPE_SIZE (elttype
))
6179 ? tree_to_uhwi (TYPE_SIZE (elttype
))
6182 bitsize
= GET_MODE_BITSIZE (mode
);
6184 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6186 tree lo_index
= TREE_OPERAND (index
, 0);
6187 tree hi_index
= TREE_OPERAND (index
, 1);
6188 rtx index_r
, pos_rtx
;
6189 HOST_WIDE_INT lo
, hi
, count
;
6192 /* If the range is constant and "small", unroll the loop. */
6194 && tree_fits_shwi_p (lo_index
)
6195 && tree_fits_shwi_p (hi_index
)
6196 && (lo
= tree_to_shwi (lo_index
),
6197 hi
= tree_to_shwi (hi_index
),
6198 count
= hi
- lo
+ 1,
6201 || (tree_fits_uhwi_p (TYPE_SIZE (elttype
))
6202 && (tree_to_uhwi (TYPE_SIZE (elttype
)) * count
6205 lo
-= minelt
; hi
-= minelt
;
6206 for (; lo
<= hi
; lo
++)
6208 bitpos
= lo
* tree_to_shwi (TYPE_SIZE (elttype
));
6211 && !MEM_KEEP_ALIAS_SET_P (target
)
6212 && TREE_CODE (type
) == ARRAY_TYPE
6213 && TYPE_NONALIASED_COMPONENT (type
))
6215 target
= copy_rtx (target
);
6216 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6219 store_constructor_field
6220 (target
, bitsize
, bitpos
, mode
, value
, cleared
,
6221 get_alias_set (elttype
));
6226 rtx_code_label
*loop_start
= gen_label_rtx ();
6227 rtx_code_label
*loop_end
= gen_label_rtx ();
6230 expand_normal (hi_index
);
6232 index
= build_decl (EXPR_LOCATION (exp
),
6233 VAR_DECL
, NULL_TREE
, domain
);
6234 index_r
= gen_reg_rtx (promote_decl_mode (index
, NULL
));
6235 SET_DECL_RTL (index
, index_r
);
6236 store_expr (lo_index
, index_r
, 0, false);
6238 /* Build the head of the loop. */
6239 do_pending_stack_adjust ();
6240 emit_label (loop_start
);
6242 /* Assign value to element index. */
6244 fold_convert (ssizetype
,
6245 fold_build2 (MINUS_EXPR
,
6248 TYPE_MIN_VALUE (domain
)));
6251 size_binop (MULT_EXPR
, position
,
6252 fold_convert (ssizetype
,
6253 TYPE_SIZE_UNIT (elttype
)));
6255 pos_rtx
= expand_normal (position
);
6256 xtarget
= offset_address (target
, pos_rtx
,
6257 highest_pow2_factor (position
));
6258 xtarget
= adjust_address (xtarget
, mode
, 0);
6259 if (TREE_CODE (value
) == CONSTRUCTOR
)
6260 store_constructor (value
, xtarget
, cleared
,
6261 bitsize
/ BITS_PER_UNIT
);
6263 store_expr (value
, xtarget
, 0, false);
6265 /* Generate a conditional jump to exit the loop. */
6266 exit_cond
= build2 (LT_EXPR
, integer_type_node
,
6268 jumpif (exit_cond
, loop_end
, -1);
		/* Update the loop counter, and jump to the head of
		   the loop.  */
6272 expand_assignment (index
,
6273 build2 (PLUS_EXPR
, TREE_TYPE (index
),
6274 index
, integer_one_node
),
6277 emit_jump (loop_start
);
6279 /* Build the end of the loop. */
6280 emit_label (loop_end
);
6283 else if ((index
!= 0 && ! tree_fits_shwi_p (index
))
6284 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype
)))
6289 index
= ssize_int (1);
6292 index
= fold_convert (ssizetype
,
6293 fold_build2 (MINUS_EXPR
,
6296 TYPE_MIN_VALUE (domain
)));
6299 size_binop (MULT_EXPR
, index
,
6300 fold_convert (ssizetype
,
6301 TYPE_SIZE_UNIT (elttype
)));
6302 xtarget
= offset_address (target
,
6303 expand_normal (position
),
6304 highest_pow2_factor (position
));
6305 xtarget
= adjust_address (xtarget
, mode
, 0);
6306 store_expr (value
, xtarget
, 0, false);
6311 bitpos
= ((tree_to_shwi (index
) - minelt
)
6312 * tree_to_uhwi (TYPE_SIZE (elttype
)));
6314 bitpos
= (i
* tree_to_uhwi (TYPE_SIZE (elttype
)));
6316 if (MEM_P (target
) && !MEM_KEEP_ALIAS_SET_P (target
)
6317 && TREE_CODE (type
) == ARRAY_TYPE
6318 && TYPE_NONALIASED_COMPONENT (type
))
6320 target
= copy_rtx (target
);
6321 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6323 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
6324 cleared
, get_alias_set (elttype
));
6332 unsigned HOST_WIDE_INT idx
;
6333 constructor_elt
*ce
;
6336 int icode
= CODE_FOR_nothing
;
6337 tree elttype
= TREE_TYPE (type
);
6338 int elt_size
= tree_to_uhwi (TYPE_SIZE (elttype
));
6339 machine_mode eltmode
= TYPE_MODE (elttype
);
6340 HOST_WIDE_INT bitsize
;
6341 HOST_WIDE_INT bitpos
;
6342 rtvec vector
= NULL
;
6344 alias_set_type alias
;
6346 gcc_assert (eltmode
!= BLKmode
);
6348 n_elts
= TYPE_VECTOR_SUBPARTS (type
);
6349 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
6351 machine_mode mode
= GET_MODE (target
);
6353 icode
= (int) optab_handler (vec_init_optab
, mode
);
6354 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6355 if (icode
!= CODE_FOR_nothing
)
6359 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
6360 if (TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
)
6362 icode
= CODE_FOR_nothing
;
6366 if (icode
!= CODE_FOR_nothing
)
6370 vector
= rtvec_alloc (n_elts
);
6371 for (i
= 0; i
< n_elts
; i
++)
6372 RTVEC_ELT (vector
, i
) = CONST0_RTX (GET_MODE_INNER (mode
));
	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is a static
	   constructor of a non-BLKmode object.  */
6381 else if (REG_P (target
) && TREE_STATIC (exp
))
6385 unsigned HOST_WIDE_INT count
= 0, zero_count
= 0;
6388 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
6390 int n_elts_here
= tree_to_uhwi
6391 (int_const_binop (TRUNC_DIV_EXPR
,
6392 TYPE_SIZE (TREE_TYPE (value
)),
6393 TYPE_SIZE (elttype
)));
6395 count
+= n_elts_here
;
6396 if (mostly_zeros_p (value
))
6397 zero_count
+= n_elts_here
;
6400 /* Clear the entire vector first if there are any missing elements,
6401 or if the incidence of zero elements is >= 75%. */
6402 need_to_clear
= (count
< n_elts
|| 4 * zero_count
>= 3 * count
);
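	/* Worked example, not part of the original sources: with
	   n_elts == 8, count == 8 and zero_count == 6, the test
	   4 * zero_count >= 3 * count reads 24 >= 24, so the vector is
	   cleared up front and only the two nonzero elements are
	   stored individually below.  */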
6405 if (need_to_clear
&& size
> 0 && !vector
)
6408 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6410 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6414 /* Inform later passes that the old value is dead. */
6415 if (!cleared
&& !vector
&& REG_P (target
))
6416 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6419 alias
= MEM_ALIAS_SET (target
);
6421 alias
= get_alias_set (elttype
);
6423 /* Store each element of the constructor into the corresponding
6424 element of TARGET, determined by counting the elements. */
6425 for (idx
= 0, i
= 0;
6426 vec_safe_iterate (CONSTRUCTOR_ELTS (exp
), idx
, &ce
);
6427 idx
++, i
+= bitsize
/ elt_size
)
6429 HOST_WIDE_INT eltpos
;
6430 tree value
= ce
->value
;
6432 bitsize
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value
)));
6433 if (cleared
&& initializer_zerop (value
))
6437 eltpos
= tree_to_uhwi (ce
->index
);
	      /* vec_init<mode> should not be used if there are VECTOR_TYPE
		 elements.  */
6445 gcc_assert (TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
);
6446 RTVEC_ELT (vector
, eltpos
)
6447 = expand_normal (value
);
6451 machine_mode value_mode
=
6452 TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
6453 ? TYPE_MODE (TREE_TYPE (value
))
6455 bitpos
= eltpos
* elt_size
;
6456 store_constructor_field (target
, bitsize
, bitpos
, value_mode
,
6457 value
, cleared
, alias
);
6462 emit_insn (GEN_FCN (icode
)
6464 gen_rtx_PARALLEL (GET_MODE (target
), vector
)));
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is the bit position of the first bit-field in this region.
   BITREGION_END is the bit position of the ending bit-field in this region.
   These two fields are 0 if the C++ memory model does not apply,
   or if we are not interested in keeping track of bit-field regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */
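/* Illustrative example, not part of the original sources: storing a
   32-bit value starting at bit 8 of TARGET, outside of any bit-field
   region and without a nontemporal hint, would be written as

     store_field (target, 32, 8, 0, 0, SImode, exp, alias_set, false);

   with MODE passed as VOIDmode instead when the destination is a
   bit-field.  */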
6493 store_field (rtx target
, HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
6494 unsigned HOST_WIDE_INT bitregion_start
,
6495 unsigned HOST_WIDE_INT bitregion_end
,
6496 machine_mode mode
, tree exp
,
6497 alias_set_type alias_set
, bool nontemporal
)
6499 if (TREE_CODE (exp
) == ERROR_MARK
)
6502 /* If we have nothing to store, do nothing unless the expression has
6505 return expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6507 if (GET_CODE (target
) == CONCAT
)
6509 /* We're storing into a struct containing a single __complex. */
6511 gcc_assert (!bitpos
);
6512 return store_expr (exp
, target
, 0, nontemporal
);
6515 /* If the structure is in a register or if the component
6516 is a bit field, we cannot use addressing to access it.
6517 Use bit-field techniques or SUBREG to store in it. */
6519 if (mode
== VOIDmode
6520 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
6521 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6522 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6524 || GET_CODE (target
) == SUBREG
6525 /* If the field isn't aligned enough to store as an ordinary memref,
6526 store it as a bit field. */
6528 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
6529 || bitpos
% GET_MODE_ALIGNMENT (mode
))
6530 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
6531 || (bitpos
% BITS_PER_UNIT
!= 0)))
6532 || (bitsize
>= 0 && mode
!= BLKmode
6533 && GET_MODE_BITSIZE (mode
) > bitsize
)
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
6538 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
6539 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0)
6540 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6541 decl we must use bitfield operations. */
6543 && TREE_CODE (exp
) == MEM_REF
6544 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
6545 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
6546 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp
, 0),0 ))
6547 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) != BLKmode
))
6552 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6553 implies a mask operation. If the precision is the same size as
6554 the field we're storing into, that mask is redundant. This is
	 particularly common with bit field assignments generated by the
	 C++ front end.  */
6557 nop_def
= get_def_for_expr (exp
, NOP_EXPR
);
6560 tree type
= TREE_TYPE (exp
);
6561 if (INTEGRAL_TYPE_P (type
)
6562 && TYPE_PRECISION (type
) < GET_MODE_BITSIZE (TYPE_MODE (type
))
6563 && bitsize
== TYPE_PRECISION (type
))
6565 tree op
= gimple_assign_rhs1 (nop_def
);
6566 type
= TREE_TYPE (op
);
6567 if (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) >= bitsize
)
6572 temp
= expand_normal (exp
);
      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
6578 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
6579 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
6580 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
6581 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
6582 GET_MODE_BITSIZE (GET_MODE (temp
)) - bitsize
,
6585 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6586 if (mode
!= VOIDmode
&& mode
!= BLKmode
6587 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
6588 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
6590 /* If the modes of TEMP and TARGET are both BLKmode, both
6591 must be in memory and BITPOS must be aligned on a byte
6592 boundary. If so, we simply do a block copy. Likewise
6593 for a BLKmode-like TARGET. */
6594 if (GET_MODE (temp
) == BLKmode
6595 && (GET_MODE (target
) == BLKmode
6597 && GET_MODE_CLASS (GET_MODE (target
)) == MODE_INT
6598 && (bitpos
% BITS_PER_UNIT
) == 0
6599 && (bitsize
% BITS_PER_UNIT
) == 0)))
6601 gcc_assert (MEM_P (target
) && MEM_P (temp
)
6602 && (bitpos
% BITS_PER_UNIT
) == 0);
6604 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
6605 emit_block_move (target
, temp
,
6606 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6613 /* Handle calls that return values in multiple non-contiguous locations.
6614 The Irix 6 ABI has examples of this. */
6615 if (GET_CODE (temp
) == PARALLEL
)
6617 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
6619 if (mode
== BLKmode
|| mode
== VOIDmode
)
6620 mode
= smallest_mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
);
6621 temp_target
= gen_reg_rtx (mode
);
6622 emit_group_store (temp_target
, temp
, TREE_TYPE (exp
), size
);
6625 else if (mode
== BLKmode
)
6627 /* Handle calls that return BLKmode values in registers. */
6628 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
6630 rtx temp_target
= gen_reg_rtx (GET_MODE (temp
));
6631 copy_blkmode_from_reg (temp_target
, temp
, TREE_TYPE (exp
));
6636 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
6638 mode
= smallest_mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
);
6639 temp_target
= gen_reg_rtx (mode
);
6641 = extract_bit_field (temp
, size
* BITS_PER_UNIT
, 0, 1,
6642 temp_target
, mode
, mode
);
6647 /* Store the value in the bitfield. */
6648 store_bit_field (target
, bitsize
, bitpos
,
6649 bitregion_start
, bitregion_end
,
6656 /* Now build a reference to just the desired component. */
6657 rtx to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
6659 if (to_rtx
== target
)
6660 to_rtx
= copy_rtx (to_rtx
);
6662 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
6663 set_mem_alias_set (to_rtx
, alias_set
);
6665 return store_expr (exp
, to_rtx
, 0, nontemporal
);
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */
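/* Illustrative example, not part of the original sources: for a
   COMPONENT_REF s.f, where F is a non-bit-field int laid out 4 bytes
   into S, this returns the decl for S with *PBITSIZE == 32,
   *PBITPOS == 32, *POFFSET == NULL_TREE and *PMODE == SImode
   (assuming a 32-bit int).  */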
6705 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
6706 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
6707 machine_mode
*pmode
, int *punsignedp
,
6708 int *pvolatilep
, bool keep_aligning
)
6711 machine_mode mode
= VOIDmode
;
6712 bool blkmode_bitfield
= false;
6713 tree offset
= size_zero_node
;
6714 offset_int bit_offset
= 0;
6716 /* First get the mode, signedness, and size. We do this from just the
6717 outermost expression. */
6719 if (TREE_CODE (exp
) == COMPONENT_REF
)
6721 tree field
= TREE_OPERAND (exp
, 1);
6722 size_tree
= DECL_SIZE (field
);
6723 if (flag_strict_volatile_bitfields
> 0
6724 && TREE_THIS_VOLATILE (exp
)
6725 && DECL_BIT_FIELD_TYPE (field
)
6726 && DECL_MODE (field
) != BLKmode
)
	  /* Volatile bitfields should be accessed in the mode of the
	     field's type, not the mode computed based on the bit
	     size.  */
6730 mode
= TYPE_MODE (DECL_BIT_FIELD_TYPE (field
));
6731 else if (!DECL_BIT_FIELD (field
))
6732 mode
= DECL_MODE (field
);
6733 else if (DECL_MODE (field
) == BLKmode
)
6734 blkmode_bitfield
= true;
6736 *punsignedp
= DECL_UNSIGNED (field
);
6738 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
6740 size_tree
= TREE_OPERAND (exp
, 1);
6741 *punsignedp
= (! INTEGRAL_TYPE_P (TREE_TYPE (exp
))
6742 || TYPE_UNSIGNED (TREE_TYPE (exp
)));
      /* For vector types, with the correct size of access, use the mode of
	 the inner type.  */
6746 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == VECTOR_TYPE
6747 && TREE_TYPE (exp
) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
6748 && tree_int_cst_equal (size_tree
, TYPE_SIZE (TREE_TYPE (exp
))))
6749 mode
= TYPE_MODE (TREE_TYPE (exp
));
6753 mode
= TYPE_MODE (TREE_TYPE (exp
));
6754 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
6756 if (mode
== BLKmode
)
6757 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
6759 *pbitsize
= GET_MODE_BITSIZE (mode
);
6764 if (! tree_fits_uhwi_p (size_tree
))
6765 mode
= BLKmode
, *pbitsize
= -1;
6767 *pbitsize
= tree_to_uhwi (size_tree
);
6770 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6771 and find the ultimate containing object. */
6774 switch (TREE_CODE (exp
))
6777 bit_offset
+= wi::to_offset (TREE_OPERAND (exp
, 2));
6782 tree field
= TREE_OPERAND (exp
, 1);
6783 tree this_offset
= component_ref_field_offset (exp
);
6785 /* If this field hasn't been filled in yet, don't go past it.
6786 This should only happen when folding expressions made during
6787 type construction. */
6788 if (this_offset
== 0)
6791 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
6792 bit_offset
+= wi::to_offset (DECL_FIELD_BIT_OFFSET (field
));
6794 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6799 case ARRAY_RANGE_REF
:
6801 tree index
= TREE_OPERAND (exp
, 1);
6802 tree low_bound
= array_ref_low_bound (exp
);
6803 tree unit_size
= array_ref_element_size (exp
);
6805 /* We assume all arrays have sizes that are a multiple of a byte.
6806 First subtract the lower bound, if any, in the type of the
6807 index, then convert to sizetype and multiply by the size of
6808 the array element. */
6809 if (! integer_zerop (low_bound
))
6810 index
= fold_build2 (MINUS_EXPR
, TREE_TYPE (index
),
6813 offset
= size_binop (PLUS_EXPR
, offset
,
6814 size_binop (MULT_EXPR
,
6815 fold_convert (sizetype
, index
),
6824 bit_offset
+= *pbitsize
;
6827 case VIEW_CONVERT_EXPR
:
6828 if (keep_aligning
&& STRICT_ALIGNMENT
6829 && (TYPE_ALIGN (TREE_TYPE (exp
))
6830 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6831 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
6832 < BIGGEST_ALIGNMENT
)
6833 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
6834 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6839 /* Hand back the decl for MEM[&decl, off]. */
6840 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
6842 tree off
= TREE_OPERAND (exp
, 1);
6843 if (!integer_zerop (off
))
6845 offset_int boff
, coff
= mem_ref_offset (exp
);
6846 boff
= wi::lshift (coff
, LOG2_BITS_PER_UNIT
);
6849 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6857 /* If any reference in the chain is volatile, the effect is volatile. */
6858 if (TREE_THIS_VOLATILE (exp
))
6861 exp
= TREE_OPERAND (exp
, 0);
  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
6868 if (TREE_CODE (offset
) == INTEGER_CST
)
6870 offset_int tem
= wi::sext (wi::to_offset (offset
),
6871 TYPE_PRECISION (sizetype
));
6872 tem
= wi::lshift (tem
, LOG2_BITS_PER_UNIT
);
6874 if (wi::fits_shwi_p (tem
))
6876 *pbitpos
= tem
.to_shwi ();
6877 *poffset
= offset
= NULL_TREE
;
6881 /* Otherwise, split it up. */
6884 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6885 if (wi::neg_p (bit_offset
))
6887 offset_int mask
= wi::mask
<offset_int
> (LOG2_BITS_PER_UNIT
, false);
6888 offset_int tem
= bit_offset
.and_not (mask
);
      /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
	 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
6892 tem
= wi::arshift (tem
, LOG2_BITS_PER_UNIT
);
6893 offset
= size_binop (PLUS_EXPR
, offset
,
6894 wide_int_to_tree (sizetype
, tem
));
6897 *pbitpos
= bit_offset
.to_shwi ();
6901 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6902 if (mode
== VOIDmode
6904 && (*pbitpos
% BITS_PER_UNIT
) == 0
6905 && (*pbitsize
% BITS_PER_UNIT
) == 0)
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
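/* Illustrative note, not part of the original sources: for an ARRAY_REF
   into "int a[10]" with no operand 3, this simply returns
   TYPE_SIZE_UNIT of int, i.e. the sizetype constant 4 on a target with
   32-bit int.  */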
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Returns true if REF is an array reference to an array at the end of
   a structure.  If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  if (TREE_CODE (ref) != ARRAY_REF
      && TREE_CODE (ref) != ARRAY_RANGE_REF)
    return false;

  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
         non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	{
	  tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
	    nextf = DECL_CHAIN (nextf);
	  if (nextf)
	    return false;
	}

      ref = TREE_OPERAND (ref, 0);
    }

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  */
  if (DECL_P (ref))
    return false;

  return true;
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
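/* Illustrative note, not part of the original sources: when operand 2 of
   the COMPONENT_REF is absent, the result is DECL_FIELD_OFFSET (FIELD)
   with any PLACEHOLDER_EXPR substituted; the remaining sub-unit part of
   the position is carried separately in DECL_FIELD_BIT_OFFSET.  */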
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bit-field components, so we need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
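/* Illustrative note, not part of the original sources: for an assignment
   to a COMPONENT_REF whose field has DECL_ALIGN of 8 bits inside an
   object that is otherwise 32-bit aligned, the MIN rule above yields 8,
   so the target may only be assumed to be byte aligned.  */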
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
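/* Illustrative note, not part of the original sources: called on
   (plus (reg X) (const_int 4)) with TARGET == 0, this emits an add and
   returns the pseudo holding the sum, while a VALUE that is already a
   plain REG, MEM or constant is returned unchanged.  */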
7093 force_operand (rtx value
, rtx target
)
7096 /* Use subtarget as the target for operand 0 of a binary operation. */
7097 rtx subtarget
= get_subtarget (target
);
7098 enum rtx_code code
= GET_CODE (value
);
7100 /* Check for subreg applied to an expression produced by loop optimizer. */
7102 && !REG_P (SUBREG_REG (value
))
7103 && !MEM_P (SUBREG_REG (value
)))
7106 = simplify_gen_subreg (GET_MODE (value
),
7107 force_reg (GET_MODE (SUBREG_REG (value
)),
7108 force_operand (SUBREG_REG (value
),
7110 GET_MODE (SUBREG_REG (value
)),
7111 SUBREG_BYTE (value
));
7112 code
= GET_CODE (value
);
7115 /* Check for a PIC address load. */
7116 if ((code
== PLUS
|| code
== MINUS
)
7117 && XEXP (value
, 0) == pic_offset_table_rtx
7118 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
7119 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
7120 || GET_CODE (XEXP (value
, 1)) == CONST
))
7123 subtarget
= gen_reg_rtx (GET_MODE (value
));
7124 emit_move_insn (subtarget
, value
);
7128 if (ARITHMETIC_P (value
))
7130 op2
= XEXP (value
, 1);
7131 if (!CONSTANT_P (op2
) && !(REG_P (op2
) && op2
!= subtarget
))
7133 if (code
== MINUS
&& CONST_INT_P (op2
))
7136 op2
= negate_rtx (GET_MODE (value
), op2
);
7139 /* Check for an addition with OP2 a constant integer and our first
7140 operand a PLUS of a virtual register and something else. In that
7141 case, we want to emit the sum of the virtual register and the
7142 constant first and then add the other value. This allows virtual
7143 register instantiation to simply modify the constant rather than
7144 creating another one around this addition. */
7145 if (code
== PLUS
&& CONST_INT_P (op2
)
7146 && GET_CODE (XEXP (value
, 0)) == PLUS
7147 && REG_P (XEXP (XEXP (value
, 0), 0))
7148 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7149 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
7151 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
7152 XEXP (XEXP (value
, 0), 0), op2
,
7153 subtarget
, 0, OPTAB_LIB_WIDEN
);
7154 return expand_simple_binop (GET_MODE (value
), code
, temp
,
7155 force_operand (XEXP (XEXP (value
,
7157 target
, 0, OPTAB_LIB_WIDEN
);
7160 op1
= force_operand (XEXP (value
, 0), subtarget
);
7161 op2
= force_operand (op2
, NULL_RTX
);
7165 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
7167 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
7168 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7169 target
, 1, OPTAB_LIB_WIDEN
);
7171 return expand_divmod (0,
7172 FLOAT_MODE_P (GET_MODE (value
))
7173 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
7174 GET_MODE (value
), op1
, op2
, target
, 0);
7176 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7179 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
7182 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7185 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7186 target
, 0, OPTAB_LIB_WIDEN
);
7188 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7189 target
, 1, OPTAB_LIB_WIDEN
);
7192 if (UNARY_P (value
))
7195 target
= gen_reg_rtx (GET_MODE (value
));
7196 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
7203 case FLOAT_TRUNCATE
:
7204 convert_move (target
, op1
, code
== ZERO_EXTEND
);
7209 expand_fix (target
, op1
, code
== UNSIGNED_FIX
);
7213 case UNSIGNED_FLOAT
:
7214 expand_float (target
, op1
, code
== UNSIGNED_FLOAT
);
7218 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
7222 #ifdef INSN_SCHEDULING
7223 /* On machines that have insn scheduling, we want all memory reference to be
7224 explicit, so we need to deal with such paradoxical SUBREGs. */
7225 if (paradoxical_subreg_p (value
) && MEM_P (SUBREG_REG (value
)))
7227 = simplify_gen_subreg (GET_MODE (value
),
7228 force_reg (GET_MODE (SUBREG_REG (value
)),
7229 force_operand (SUBREG_REG (value
),
7231 GET_MODE (SUBREG_REG (value
)),
7232 SUBREG_BYTE (value
));
7238 /* Subroutine of expand_expr: return nonzero iff there is no way that
7239 EXP can reference X, which is being modified. TOP_P is nonzero if this
7240 call is going to be used to determine whether we need a temporary
7241 for EXP, as opposed to a recursive call to this function.
7243 It is always safe for this routine to return zero since it merely
7244 searches for optimization opportunities. */
7247 safe_from_p (const_rtx x
, tree exp
, int top_p
)
7253 /* If EXP has varying size, we MUST use a target since we currently
7254 have no way of allocating temporaries of variable size
7255 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7256 So we assume here that something at a higher level has prevented a
7257 clash. This is somewhat bogus, but the best we can do. Only
7258 do this when X is BLKmode and when we are at the top level. */
7259 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
7260 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
7261 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
7262 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
7263 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
7265 && GET_MODE (x
) == BLKmode
)
7266 /* If X is in the outgoing argument area, it is always safe. */
7268 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
7269 || (GET_CODE (XEXP (x
, 0)) == PLUS
7270 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
7273 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7274 find the underlying pseudo. */
7275 if (GET_CODE (x
) == SUBREG
)
7278 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7282 /* Now look at our tree code and possibly recurse. */
7283 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
7285 case tcc_declaration
:
7286 exp_rtl
= DECL_RTL_IF_SET (exp
);
7292 case tcc_exceptional
:
7293 if (TREE_CODE (exp
) == TREE_LIST
)
7297 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
7299 exp
= TREE_CHAIN (exp
);
7302 if (TREE_CODE (exp
) != TREE_LIST
)
7303 return safe_from_p (x
, exp
, 0);
7306 else if (TREE_CODE (exp
) == CONSTRUCTOR
)
7308 constructor_elt
*ce
;
7309 unsigned HOST_WIDE_INT idx
;
7311 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp
), idx
, ce
)
7312 if ((ce
->index
!= NULL_TREE
&& !safe_from_p (x
, ce
->index
, 0))
7313 || !safe_from_p (x
, ce
->value
, 0))
7317 else if (TREE_CODE (exp
) == ERROR_MARK
)
7318 return 1; /* An already-visited SAVE_EXPR? */
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
7325 return (TREE_CODE (exp
) != DECL_EXPR
7326 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
7327 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
7328 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
7331 case tcc_comparison
:
7332 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
7337 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7339 case tcc_expression
:
7342 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7343 the expression. If it is set, we conflict iff we are that rtx or
7344 both are in memory. Otherwise, we check all operands of the
7345 expression recursively. */
7347 switch (TREE_CODE (exp
))
7350 /* If the operand is static or we are static, we can't conflict.
7351 Likewise if we don't conflict with the operand at all. */
7352 if (staticp (TREE_OPERAND (exp
, 0))
7353 || TREE_STATIC (exp
)
7354 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
7360 exp
= TREE_OPERAND (exp
, 0);
7363 if (!DECL_RTL_SET_P (exp
)
7364 || !MEM_P (DECL_RTL (exp
)))
7367 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
7373 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
7374 get_alias_set (exp
)))
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
7381 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7386 case WITH_CLEANUP_EXPR
:
7387 case CLEANUP_POINT_EXPR
:
7388 /* Lowered by gimplify.c. */
7392 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7398 /* If we have an rtx, we do not need to scan our operands. */
7402 nops
= TREE_OPERAND_LENGTH (exp
);
7403 for (i
= 0; i
< nops
; i
++)
7404 if (TREE_OPERAND (exp
, i
) != 0
7405 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
7411 /* Should never get a type here. */
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
7419 if (GET_CODE (exp_rtl
) == SUBREG
)
7421 exp_rtl
= SUBREG_REG (exp_rtl
);
7423 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
7427 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7428 are memory and they conflict. */
7429 return ! (rtx_equal_p (x
, exp_rtl
)
7430 || (MEM_P (x
) && MEM_P (exp_rtl
)
7431 && true_dependence (exp_rtl
, VOIDmode
, x
)));
7434 /* If we reach here, it is safe. */
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
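/* Illustrative sketch, not part of the original sources: the same
   "largest power of two dividing X" computation on a plain integer,
   matching what the function above derives from tree_ctz for a
   constant EXP.  */
#if 0
static unsigned HOST_WIDE_INT
lowest_set_bit_example (unsigned HOST_WIDE_INT x)
{
  /* For x == 24 (binary 11000) this returns 8, i.e. 1 << 3, since the
     value has three trailing zero bits.  */
  return x & -x;
}
#endif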
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
7469 #ifdef HAVE_conditional_move
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
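/* Illustrative usage, not part of the original sources: a typical binary
   expander in this file does something along the lines of

     expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
     temp = expand_simple_binop (mode, PLUS, op0, op1, target,
				 unsignedp, OPTAB_LIB_WIDEN);

   i.e. both operands are expanded together so that expanding OP1 cannot
   clobber the target chosen for OP0.  */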
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
7571 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7572 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7575 expand_expr_addr_expr_1 (tree exp
, rtx target
, machine_mode tmode
,
7576 enum expand_modifier modifier
, addr_space_t as
)
7578 rtx result
, subtarget
;
7580 HOST_WIDE_INT bitsize
, bitpos
;
7581 int volatilep
, unsignedp
;
7584 /* If we are taking the address of a constant and are at the top level,
7585 we have to use output_constant_def since we can't call force_const_mem
7587 /* ??? This should be considered a front-end bug. We should not be
7588 generating ADDR_EXPR of something that isn't an LVALUE. The only
7589 exception here is STRING_CST. */
7590 if (CONSTANT_CLASS_P (exp
))
7592 result
= XEXP (expand_expr_constant (exp
, 0, modifier
), 0);
7593 if (modifier
< EXPAND_SUM
)
7594 result
= force_operand (result
, target
);
7598 /* Everything must be something allowed by is_gimple_addressable. */
7599 switch (TREE_CODE (exp
))
7602 /* This case will happen via recursion for &a->b. */
7603 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7607 tree tem
= TREE_OPERAND (exp
, 0);
7608 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
7609 tem
= fold_build_pointer_plus (tem
, TREE_OPERAND (exp
, 1));
7610 return expand_expr (tem
, target
, tmode
, modifier
);
7614 /* Expand the initializer like constants above. */
7615 result
= XEXP (expand_expr_constant (DECL_INITIAL (exp
),
7617 if (modifier
< EXPAND_SUM
)
7618 result
= force_operand (result
, target
);
7622 /* The real part of the complex number is always first, therefore
7623 the address is the same as the address of the parent object. */
7626 inner
= TREE_OPERAND (exp
, 0);
7630 /* The imaginary part of the complex number is always second.
7631 The expression is therefore always offset by the size of the
7634 bitpos
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp
)));
7635 inner
= TREE_OPERAND (exp
, 0);
7638 case COMPOUND_LITERAL_EXPR
:
7639 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7640 rtl_for_decl_init is called on DECL_INITIAL with
	 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified.  */
7642 if (modifier
== EXPAND_INITIALIZER
7643 && COMPOUND_LITERAL_EXPR_DECL (exp
))
7644 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp
),
7645 target
, tmode
, modifier
, as
);
7648 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7649 expand_expr, as that can have various side effects; LABEL_DECLs for
7650 example, may not have their DECL_RTL set yet. Expand the rtl of
7651 CONSTRUCTORs too, which should yield a memory reference for the
7652 constructor's contents. Assume language specific tree nodes can
7653 be expanded in some interesting way. */
7654 gcc_assert (TREE_CODE (exp
) < LAST_AND_UNUSED_TREE_CODE
);
7656 || TREE_CODE (exp
) == CONSTRUCTOR
7657 || TREE_CODE (exp
) == COMPOUND_LITERAL_EXPR
)
7659 result
= expand_expr (exp
, target
, tmode
,
7660 modifier
== EXPAND_INITIALIZER
7661 ? EXPAND_INITIALIZER
: EXPAND_CONST_ADDRESS
);
7663 /* If the DECL isn't in memory, then the DECL wasn't properly
7664 marked TREE_ADDRESSABLE, which will be either a front-end
7665 or a tree optimizer bug. */
7667 if (TREE_ADDRESSABLE (exp
)
7669 && ! targetm
.calls
.allocate_stack_slots_for_args ())
7671 error ("local frame unavailable (naked function?)");
7675 gcc_assert (MEM_P (result
));
7676 result
= XEXP (result
, 0);
7678 /* ??? Is this needed anymore? */
7680 TREE_USED (exp
) = 1;
7682 if (modifier
!= EXPAND_INITIALIZER
7683 && modifier
!= EXPAND_CONST_ADDRESS
7684 && modifier
!= EXPAND_SUM
)
7685 result
= force_operand (result
, target
);
7689 /* Pass FALSE as the last argument to get_inner_reference although
7690 we are expanding to RTL. The rationale is that we know how to
7691 handle "aligning nodes" here: we can just bypass them because
7692 they won't change the final object whose address will be returned
7693 (they actually exist only for that purpose). */
7694 inner
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7695 &mode1
, &unsignedp
, &volatilep
, false);
7699 /* We must have made progress. */
7700 gcc_assert (inner
!= exp
);
7702 subtarget
= offset
|| bitpos
? NULL_RTX
: target
;
7703 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7704 inner alignment, force the inner to be sufficiently aligned. */
7705 if (CONSTANT_CLASS_P (inner
)
7706 && TYPE_ALIGN (TREE_TYPE (inner
)) < TYPE_ALIGN (TREE_TYPE (exp
)))
7708 inner
= copy_node (inner
);
7709 TREE_TYPE (inner
) = copy_node (TREE_TYPE (inner
));
7710 TYPE_ALIGN (TREE_TYPE (inner
)) = TYPE_ALIGN (TREE_TYPE (exp
));
7711 TYPE_USER_ALIGN (TREE_TYPE (inner
)) = 1;
7713 result
= expand_expr_addr_expr_1 (inner
, subtarget
, tmode
, modifier
, as
);
7719 if (modifier
!= EXPAND_NORMAL
)
7720 result
= force_operand (result
, NULL
);
7721 tmp
= expand_expr (offset
, NULL_RTX
, tmode
,
7722 modifier
== EXPAND_INITIALIZER
7723 ? EXPAND_INITIALIZER
: EXPAND_NORMAL
);
7725 /* expand_expr is allowed to return an object in a mode other
7726 than TMODE. If it did, we need to convert. */
7727 if (GET_MODE (tmp
) != VOIDmode
&& tmode
!= GET_MODE (tmp
))
7728 tmp
= convert_modes (tmode
, GET_MODE (tmp
),
7729 tmp
, TYPE_UNSIGNED (TREE_TYPE (offset
)));
7730 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7731 tmp
= convert_memory_address_addr_space (tmode
, tmp
, as
);
7733 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7734 result
= simplify_gen_binary (PLUS
, tmode
, result
, tmp
);
7737 subtarget
= bitpos
? NULL_RTX
: target
;
7738 result
= expand_simple_binop (tmode
, PLUS
, result
, tmp
, subtarget
,
7739 1, OPTAB_LIB_WIDEN
);
7745 /* Someone beforehand should have rejected taking the address
7746 of such an object. */
7747 gcc_assert ((bitpos
% BITS_PER_UNIT
) == 0);
7749 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7750 result
= plus_constant (tmode
, result
, bitpos
/ BITS_PER_UNIT
);
7751 if (modifier
< EXPAND_SUM
)
7752 result
= force_operand (result
, target
);
7758 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7759 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7762 expand_expr_addr_expr (tree exp
, rtx target
, machine_mode tmode
,
7763 enum expand_modifier modifier
)
7765 addr_space_t as
= ADDR_SPACE_GENERIC
;
7766 machine_mode address_mode
= Pmode
;
7767 machine_mode pointer_mode
= ptr_mode
;
7771 /* Target mode of VOIDmode says "whatever's natural". */
7772 if (tmode
== VOIDmode
)
7773 tmode
= TYPE_MODE (TREE_TYPE (exp
));
7775 if (POINTER_TYPE_P (TREE_TYPE (exp
)))
7777 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
7778 address_mode
= targetm
.addr_space
.address_mode (as
);
7779 pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7782 /* We can get called with some Weird Things if the user does silliness
7783 like "(short) &a". In that case, convert_memory_address won't do
7784 the right thing, so ignore the given target mode. */
7785 if (tmode
!= address_mode
&& tmode
!= pointer_mode
)
7786 tmode
= address_mode
;
7788 result
= expand_expr_addr_expr_1 (TREE_OPERAND (exp
, 0), target
,
7789 tmode
, modifier
, as
);
7791 /* Despite expand_expr claims concerning ignoring TMODE when not
7792 strictly convenient, stuff breaks if we don't honor it. Note
7793 that combined with the above, we only do this for pointer modes. */
7794 rmode
= GET_MODE (result
);
7795 if (rmode
== VOIDmode
)
7798 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7803 /* Generate code for computing CONSTRUCTOR EXP.
7804 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7805 is TRUE, instead of creating a temporary variable in memory
7806 NULL is returned and the caller needs to handle it differently. */
7809 expand_constructor (tree exp
, rtx target
, enum expand_modifier modifier
,
7810 bool avoid_temp_mem
)
7812 tree type
= TREE_TYPE (exp
);
7813 machine_mode mode
= TYPE_MODE (type
);
7815 /* Try to avoid creating a temporary at all. This is possible
7816 if all of the initializer is zero.
7817 FIXME: try to handle all [0..255] initializers we can handle
7819 if (TREE_STATIC (exp
)
7820 && !TREE_ADDRESSABLE (exp
)
7821 && target
!= 0 && mode
== BLKmode
7822 && all_zeros_p (exp
))
7824 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
7828 /* All elts simple constants => refer to a constant in memory. But
7829 if this is a non-BLKmode mode, let it store a field at a time
7830 since that should make a CONST_INT, CONST_WIDE_INT or
7831 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7832 use, it is best to store directly into the target unless the type
7833 is large enough that memcpy will be used. If we are making an
     initializer and all operands are constant, put it in memory as
     well.
7837 FIXME: Avoid trying to fill vector constructors piece-meal.
7838 Output them with output_constant_def below unless we're sure
7839 they're zeros. This should go away when vector initializers
7840 are treated like VECTOR_CST instead of arrays. */
7841 if ((TREE_STATIC (exp
)
7842 && ((mode
== BLKmode
7843 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
7844 || TREE_ADDRESSABLE (exp
)
7845 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type
))
7846 && (! MOVE_BY_PIECES_P
7847 (tree_to_uhwi (TYPE_SIZE_UNIT (type
)),
7849 && ! mostly_zeros_p (exp
))))
7850 || ((modifier
== EXPAND_INITIALIZER
|| modifier
== EXPAND_CONST_ADDRESS
)
7851 && TREE_CONSTANT (exp
)))
7858 constructor
= expand_expr_constant (exp
, 1, modifier
);
7860 if (modifier
!= EXPAND_CONST_ADDRESS
7861 && modifier
!= EXPAND_INITIALIZER
7862 && modifier
!= EXPAND_SUM
)
7863 constructor
= validize_mem (constructor
);
7868 /* Handle calls that pass values in multiple non-contiguous
7869 locations. The Irix 6 ABI has examples of this. */
7870 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
7871 || GET_CODE (target
) == PARALLEL
|| modifier
== EXPAND_STACK_PARM
)
7876 target
= assign_temp (type
, TREE_ADDRESSABLE (exp
), 1);
7879 store_constructor (exp
, target
, 0, int_expr_size (exp
));
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
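/* Illustrative note, not part of the original sources: a caller that just
   wants the value of EXP in its natural mode writes

     rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   which is what the expand_normal wrapper used throughout this file
   expands to.  */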
7942 expand_expr_real (tree exp
, rtx target
, machine_mode tmode
,
7943 enum expand_modifier modifier
, rtx
*alt_rtl
,
7944 bool inner_reference_p
)
7948 /* Handle ERROR_MARK before anybody tries to access its type. */
7949 if (TREE_CODE (exp
) == ERROR_MARK
7950 || (TREE_CODE (TREE_TYPE (exp
)) == ERROR_MARK
))
7952 ret
= CONST0_RTX (tmode
);
7953 return ret
? ret
: const0_rtx
;
7956 ret
= expand_expr_real_1 (exp
, target
, tmode
, modifier
, alt_rtl
,
7961 /* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */
7967 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED
,
7968 tree treeop1 ATTRIBUTE_UNUSED
,
7969 tree treeop2 ATTRIBUTE_UNUSED
)
7971 #ifdef HAVE_conditional_move
7973 rtx op00
, op01
, op1
, op2
;
7974 enum rtx_code comparison_code
;
7975 machine_mode comparison_mode
;
7978 tree type
= TREE_TYPE (treeop1
);
7979 int unsignedp
= TYPE_UNSIGNED (type
);
7980 machine_mode mode
= TYPE_MODE (type
);
7981 machine_mode orig_mode
= mode
;
7983 /* If we cannot do a conditional move on the mode, try doing it
7984 with the promoted mode. */
7985 if (!can_conditionally_move_p (mode
))
7987 mode
= promote_mode (type
, mode
, &unsignedp
);
7988 if (!can_conditionally_move_p (mode
))
7990 temp
= assign_temp (type
, 0, 0); /* Use promoted mode for temp. */
7993 temp
= assign_temp (type
, 0, 1);
7996 expand_operands (treeop1
, treeop2
,
7997 temp
, &op1
, &op2
, EXPAND_NORMAL
);
7999 if (TREE_CODE (treeop0
) == SSA_NAME
8000 && (srcstmt
= get_def_for_expr_class (treeop0
, tcc_comparison
)))
8002 tree type
= TREE_TYPE (gimple_assign_rhs1 (srcstmt
));
8003 enum tree_code cmpcode
= gimple_assign_rhs_code (srcstmt
);
8004 op00
= expand_normal (gimple_assign_rhs1 (srcstmt
));
8005 op01
= expand_normal (gimple_assign_rhs2 (srcstmt
));
8006 comparison_mode
= TYPE_MODE (type
);
8007 unsignedp
= TYPE_UNSIGNED (type
);
8008 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
8010 else if (TREE_CODE_CLASS (TREE_CODE (treeop0
)) == tcc_comparison
)
8012 tree type
= TREE_TYPE (TREE_OPERAND (treeop0
, 0));
8013 enum tree_code cmpcode
= TREE_CODE (treeop0
);
8014 op00
= expand_normal (TREE_OPERAND (treeop0
, 0));
8015 op01
= expand_normal (TREE_OPERAND (treeop0
, 1));
8016 unsignedp
= TYPE_UNSIGNED (type
);
8017 comparison_mode
= TYPE_MODE (type
);
8018 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
8022 op00
= expand_normal (treeop0
);
8024 comparison_code
= NE
;
8025 comparison_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
8028 if (GET_MODE (op1
) != mode
)
8029 op1
= gen_lowpart (mode
, op1
);
8031 if (GET_MODE (op2
) != mode
)
8032 op2
= gen_lowpart (mode
, op2
);
8034 /* Try to emit the conditional move. */
8035 insn
= emit_conditional_move (temp
, comparison_code
,
8036 op00
, op01
, comparison_mode
,
8040 /* If we could do the conditional move, emit the sequence,
8044 rtx_insn
*seq
= get_insns ();
8047 return convert_modes (orig_mode
, mode
, temp
, 0);
8050 /* Otherwise discard the sequence and fall back to code with
8058 expand_expr_real_2 (sepops ops
, rtx target
, machine_mode tmode
,
8059 enum expand_modifier modifier
)
8061 rtx op0
, op1
, op2
, temp
;
8065 enum tree_code code
= ops
->code
;
8067 rtx subtarget
, original_target
;
8069 bool reduce_bit_field
;
8070 location_t loc
= ops
->location
;
8071 tree treeop0
, treeop1
, treeop2
;
8072 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8073 ? reduce_to_bit_field_precision ((expr), \
8079 mode
= TYPE_MODE (type
);
8080 unsignedp
= TYPE_UNSIGNED (type
);
8086 /* We should be called only on simple (binary or unary) expressions,
8087 exactly those that are valid in gimple expressions that aren't
8088 GIMPLE_SINGLE_RHS (or invalid). */
8089 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
8090 || get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
8091 || get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
);
8093 ignore
= (target
== const0_rtx
8094 || ((CONVERT_EXPR_CODE_P (code
)
8095 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
8096 && TREE_CODE (type
) == VOID_TYPE
));
8098 /* We should be called only if we need the result. */
8099 gcc_assert (!ignore
);
8101 /* An operation in what may be a bit-field type needs the
8102 result to be reduced to the precision of the bit-field type,
8103 which is narrower than that of the type's mode. */
8104 reduce_bit_field
= (INTEGRAL_TYPE_P (type
)
8105 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
8107 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
8110 /* Use subtarget as the target for operand 0 of a binary operation. */
8111 subtarget
= get_subtarget (target
);
8112 original_target
= target
;
8116 case NON_LVALUE_EXPR
:
8119 if (treeop0
== error_mark_node
)
8122 if (TREE_CODE (type
) == UNION_TYPE
)
8124 tree valtype
= TREE_TYPE (treeop0
);
8126 /* If both input and output are BLKmode, this conversion isn't doing
8127 anything except possibly changing memory attribute. */
8128 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
8130 rtx result
= expand_expr (treeop0
, target
, tmode
,
8133 result
= copy_rtx (result
);
8134 set_mem_attributes (result
, type
, 0);
8140 if (TYPE_MODE (type
) != BLKmode
)
8141 target
= gen_reg_rtx (TYPE_MODE (type
));
8143 target
= assign_temp (type
, 1, 1);
8147 /* Store data into beginning of memory target. */
8148 store_expr (treeop0
,
8149 adjust_address (target
, TYPE_MODE (valtype
), 0),
8150 modifier
== EXPAND_STACK_PARM
,
8155 gcc_assert (REG_P (target
));
8157 /* Store this field into a union of the proper type. */
8158 store_field (target
,
8159 MIN ((int_size_in_bytes (TREE_TYPE
8162 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
8163 0, 0, 0, TYPE_MODE (valtype
), treeop0
, 0, false);
8166 /* Return the entire union. */
8170 if (mode
== TYPE_MODE (TREE_TYPE (treeop0
)))
8172 op0
= expand_expr (treeop0
, target
, VOIDmode
,
8175 /* If the signedness of the conversion differs and OP0 is
8176 a promoted SUBREG, clear that indication since we now
8177 have to do the proper extension. */
8178 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)) != unsignedp
8179 && GET_CODE (op0
) == SUBREG
)
8180 SUBREG_PROMOTED_VAR_P (op0
) = 0;
8182 return REDUCE_BIT_FIELD (op0
);
8185 op0
= expand_expr (treeop0
, NULL_RTX
, mode
,
8186 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
);
8187 if (GET_MODE (op0
) == mode
)
8190 /* If OP0 is a constant, just convert it into the proper mode. */
8191 else if (CONSTANT_P (op0
))
8193 tree inner_type
= TREE_TYPE (treeop0
);
8194 machine_mode inner_mode
= GET_MODE (op0
);
8196 if (inner_mode
== VOIDmode
)
8197 inner_mode
= TYPE_MODE (inner_type
);
8199 if (modifier
== EXPAND_INITIALIZER
)
8200 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
8201 subreg_lowpart_offset (mode
,
8204 op0
= convert_modes (mode
, inner_mode
, op0
,
8205 TYPE_UNSIGNED (inner_type
));
8208 else if (modifier
== EXPAND_INITIALIZER
)
8209 op0
= gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
8211 else if (target
== 0)
8212 op0
= convert_to_mode (mode
, op0
,
8213 TYPE_UNSIGNED (TREE_TYPE
8217 convert_move (target
, op0
,
8218 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8222 return REDUCE_BIT_FIELD (op0
);
    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);
	addr_space_t as_to;
	addr_space_t as_from;

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  {
	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	    return op0;
	  }

	/* For disjoint address spaces, converting anything but
	   a null pointer invokes undefined behaviour.  We simply
	   always return a null pointer here.  */
	return CONST0_RTX (mode);
      }
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
	 expand is able to handle this correctly and get the correct result out
	 of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
	 offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type, treeop1);
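      /* Illustrative example: with a 16-bit sizetype and 32-bit pointers,
	 an offset of (sizetype) 0xffff must arrive in the pointer type as
	 -1, not 65535; converting through ssizetype first makes the outer
	 conversion sign-extend rather than zero-extend.  */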
8273 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8274 something else, make sure we add the register to the constant and
8275 then to the other thing. This case can occur during strength
8276 reduction and doing it this way will produce better code if the
8277 frame pointer or argument pointer is eliminated.
8279 fold-const.c will ensure that the constant is always in the inner
8280 PLUS_EXPR, so the only case we need to do anything about is if
8281 sp, ap, or fp is our second argument, in which case we must swap
8282 the innermost first argument and our second argument. */
8284 if (TREE_CODE (treeop0
) == PLUS_EXPR
8285 && TREE_CODE (TREE_OPERAND (treeop0
, 1)) == INTEGER_CST
8286 && TREE_CODE (treeop1
) == VAR_DECL
8287 && (DECL_RTL (treeop1
) == frame_pointer_rtx
8288 || DECL_RTL (treeop1
) == stack_pointer_rtx
8289 || DECL_RTL (treeop1
) == arg_pointer_rtx
))
8294 /* If the result is to be ptr_mode and we are adding an integer to
8295 something, we might be forming a constant. So try to use
8296 plus_constant. If it produces a sum and we can't accept it,
8297 use force_operand. This allows P = &ARR[const] to generate
8298 efficient code on machines where a SYMBOL_REF is not a valid
8301 If this is an EXPAND_SUM call, always return the sum. */
8302 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
8303 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
8305 if (modifier
== EXPAND_STACK_PARM
)
8307 if (TREE_CODE (treeop0
) == INTEGER_CST
8308 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8309 && TREE_CONSTANT (treeop1
))
8313 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop1
));
8315 op1
= expand_expr (treeop1
, subtarget
, VOIDmode
,
8317 /* Use wi::shwi to ensure that the constant is
8318 truncated according to the mode of OP1, then sign extended
8319 to a HOST_WIDE_INT. Using the constant directly can result
8320 in non-canonical RTL in a 64x32 cross compile. */
8321 wc
= TREE_INT_CST_LOW (treeop0
);
8323 immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8324 op1
= plus_constant (mode
, op1
, INTVAL (constant_part
));
8325 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8326 op1
= force_operand (op1
, target
);
8327 return REDUCE_BIT_FIELD (op1
);
8330 else if (TREE_CODE (treeop1
) == INTEGER_CST
8331 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8332 && TREE_CONSTANT (treeop0
))
8336 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop0
));
8338 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8339 (modifier
== EXPAND_INITIALIZER
8340 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
8341 if (! CONSTANT_P (op0
))
8343 op1
= expand_expr (treeop1
, NULL_RTX
,
8344 VOIDmode
, modifier
);
8345 /* Return a PLUS if modifier says it's OK. */
8346 if (modifier
== EXPAND_SUM
8347 || modifier
== EXPAND_INITIALIZER
)
8348 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
8351 /* Use wi::shwi to ensure that the constant is
8352 truncated according to the mode of OP1, then sign extended
8353 to a HOST_WIDE_INT. Using the constant directly can result
8354 in non-canonical RTL in a 64x32 cross compile. */
8355 wc
= TREE_INT_CST_LOW (treeop1
);
8357 = immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8358 op0
= plus_constant (mode
, op0
, INTVAL (constant_part
));
8359 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8360 op0
= force_operand (op0
, target
);
8361 return REDUCE_BIT_FIELD (op0
);
8365 /* Use TER to expand pointer addition of a negated value
8366 as pointer subtraction. */
8367 if ((POINTER_TYPE_P (TREE_TYPE (treeop0
))
8368 || (TREE_CODE (TREE_TYPE (treeop0
)) == VECTOR_TYPE
8369 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0
)))))
8370 && TREE_CODE (treeop1
) == SSA_NAME
8371 && TYPE_MODE (TREE_TYPE (treeop0
))
8372 == TYPE_MODE (TREE_TYPE (treeop1
)))
8374 gimple def
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8377 treeop1
= gimple_assign_rhs1 (def
);
8383 /* No sense saving up arithmetic to be done
8384 if it's all in the wrong mode to form part of an address.
8385 And force_operand won't know whether to sign-extend or
8387 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8388 || mode
!= ptr_mode
)
8390 expand_operands (treeop0
, treeop1
,
8391 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8392 if (op0
== const0_rtx
)
8394 if (op1
== const0_rtx
)
8399 expand_operands (treeop0
, treeop1
,
8400 subtarget
, &op0
, &op1
, modifier
);
8401 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8405 /* For initializers, we are allowed to return a MINUS of two
8406 symbolic constants. Here we handle all cases when both operands
8408 /* Handle difference of two symbolic constants,
8409 for the sake of an initializer. */
8410 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8411 && really_constant_p (treeop0
)
8412 && really_constant_p (treeop1
))
8414 expand_operands (treeop0
, treeop1
,
8415 NULL_RTX
, &op0
, &op1
, modifier
);
8417 /* If the last operand is a CONST_INT, use plus_constant of
8418 the negated constant. Else make the MINUS. */
8419 if (CONST_INT_P (op1
))
8420 return REDUCE_BIT_FIELD (plus_constant (mode
, op0
,
8423 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode
, op0
, op1
));
8426 /* No sense saving up arithmetic to be done
8427 if it's all in the wrong mode to form part of an address.
8428 And force_operand won't know whether to sign-extend or
8430 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8431 || mode
!= ptr_mode
)
8434 expand_operands (treeop0
, treeop1
,
8435 subtarget
, &op0
, &op1
, modifier
);
8437 /* Convert A - const to A + (-const). */
8438 if (CONST_INT_P (op1
))
8440 op1
= negate_rtx (mode
, op1
);
8441 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8446 case WIDEN_MULT_PLUS_EXPR
:
8447 case WIDEN_MULT_MINUS_EXPR
:
8448 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8449 op2
= expand_normal (treeop2
);
8450 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
8454 case WIDEN_MULT_EXPR
:
8455 /* If first operand is constant, swap them.
8456 Thus the following special case checks need only
8457 check the second operand. */
8458 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8465 /* First, check if we have a multiplication of one signed and one
8466 unsigned operand. */
8467 if (TREE_CODE (treeop1
) != INTEGER_CST
8468 && (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
8469 != TYPE_UNSIGNED (TREE_TYPE (treeop1
))))
8471 machine_mode innermode
= TYPE_MODE (TREE_TYPE (treeop0
));
8472 this_optab
= usmul_widen_optab
;
8473 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8474 != CODE_FOR_nothing
)
8476 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8477 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8480 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op1
, &op0
,
8482 /* op0 and op1 might still be constant, despite the above
8483 != INTEGER_CST check. Handle it. */
8484 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8486 op0
= convert_modes (innermode
, mode
, op0
, true);
8487 op1
= convert_modes (innermode
, mode
, op1
, false);
8488 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8489 target
, unsignedp
));
8494 /* Check for a multiplication with matching signedness. */
8495 else if ((TREE_CODE (treeop1
) == INTEGER_CST
8496 && int_fits_type_p (treeop1
, TREE_TYPE (treeop0
)))
8497 || (TYPE_UNSIGNED (TREE_TYPE (treeop1
))
8498 == TYPE_UNSIGNED (TREE_TYPE (treeop0
))))
8500 tree op0type
= TREE_TYPE (treeop0
);
8501 machine_mode innermode
= TYPE_MODE (op0type
);
8502 bool zextend_p
= TYPE_UNSIGNED (op0type
);
8503 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
8504 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
8506 if (TREE_CODE (treeop0
) != INTEGER_CST
)
8508 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8509 != CODE_FOR_nothing
)
8511 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8513 /* op0 and op1 might still be constant, despite the above
8514 != INTEGER_CST check. Handle it. */
8515 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8518 op0
= convert_modes (innermode
, mode
, op0
, zextend_p
);
8520 = convert_modes (innermode
, mode
, op1
,
8521 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8522 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8526 temp
= expand_widening_mult (mode
, op0
, op1
, target
,
8527 unsignedp
, this_optab
);
8528 return REDUCE_BIT_FIELD (temp
);
8530 if (find_widening_optab_handler (other_optab
, mode
, innermode
, 0)
8532 && innermode
== word_mode
)
8535 op0
= expand_normal (treeop0
);
8536 if (TREE_CODE (treeop1
) == INTEGER_CST
)
8537 op1
= convert_modes (innermode
, mode
,
8538 expand_normal (treeop1
),
8539 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8541 op1
= expand_normal (treeop1
);
8542 /* op0 and op1 might still be constant, despite the above
8543 != INTEGER_CST check. Handle it. */
8544 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8545 goto widen_mult_const
;
8546 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8547 unsignedp
, OPTAB_LIB_WIDEN
);
8548 hipart
= gen_highpart (innermode
, temp
);
8549 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
8553 emit_move_insn (hipart
, htem
);
8554 return REDUCE_BIT_FIELD (temp
);
8558 treeop0
= fold_build1 (CONVERT_EXPR
, type
, treeop0
);
8559 treeop1
= fold_build1 (CONVERT_EXPR
, type
, treeop1
);
8560 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8561 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
    case FMA_EXPR:
      {
	optab opt = fma_optab;
	gimple def0, def2;

	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
	   call.  */
	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
	  {
	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
	    tree call_expr;

	    gcc_assert (fn != NULL_TREE);
	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
	    return expand_builtin (call_expr, target, subtarget, mode, false);
	  }

	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);

	op0 = op2 = NULL;

	if (def0 && def2
	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnms_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }
	else if (def0
		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnma_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	  }
	else if (def2
		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fms_optab;
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }

	if (op0 == NULL)
	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
	if (op2 == NULL)
	  op2 = expand_normal (treeop2);
	op1 = expand_normal (treeop1);

	return expand_ternary_op (TYPE_MODE (type), opt,
				  op0, op1, op2, target, 0);
      }
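      /* Note: the negated-operand checks above let a*b+c whose a or c comes
	 from a NEGATE_EXPR be expanded directly through the fused
	 fnma/fms/fnms patterns instead of emitting a separate negation.  */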
8616 /* If this is a fixed-point operation, then we cannot use the code
8617 below because "expand_mult" doesn't support sat/no-sat fixed-point
8619 if (ALL_FIXED_POINT_MODE_P (mode
))
8622 /* If first operand is constant, swap them.
8623 Thus the following special case checks need only
8624 check the second operand. */
8625 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8632 /* Attempt to return something suitable for generating an
8633 indexed address, for machines that support that. */
8635 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8636 && tree_fits_shwi_p (treeop1
))
8638 tree exp1
= treeop1
;
8640 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8644 op0
= force_operand (op0
, NULL_RTX
);
8646 op0
= copy_to_mode_reg (mode
, op0
);
8648 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
8649 gen_int_mode (tree_to_shwi (exp1
),
8650 TYPE_MODE (TREE_TYPE (exp1
)))));
8653 if (modifier
== EXPAND_STACK_PARM
)
8656 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8657 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8682 case MULT_HIGHPART_EXPR
:
8683 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8684 temp
= expand_mult_highpart (mode
, op0
, op1
, target
, unsignedp
);
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
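      /* Note: the leading 0/1 argument of expand_divmod selects whether the
	 quotient (0, the division codes above) or the remainder (1, the
	 modulus codes here) of the division is produced.  */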
    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;
8732 op0
= expand_expr (treeop0
, subtarget
,
8733 VOIDmode
, EXPAND_NORMAL
);
8734 if (modifier
== EXPAND_STACK_PARM
)
8736 temp
= expand_unop (mode
,
8737 optab_for_tree_code (NEGATE_EXPR
, type
,
8741 return REDUCE_BIT_FIELD (temp
);
8744 op0
= expand_expr (treeop0
, subtarget
,
8745 VOIDmode
, EXPAND_NORMAL
);
8746 if (modifier
== EXPAND_STACK_PARM
)
8749 /* ABS_EXPR is not valid for complex arguments. */
8750 gcc_assert (GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
8751 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
);
8753 /* Unsigned abs is simply the operand. Testing here means we don't
8754 risk generating incorrect code below. */
8755 if (TYPE_UNSIGNED (type
))
8758 return expand_abs (mode
, op0
, target
, unsignedp
,
8759 safe_from_p (target
, treeop0
, 1));
8763 target
= original_target
;
8765 || modifier
== EXPAND_STACK_PARM
8766 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
8767 || GET_MODE (target
) != mode
8769 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8770 target
= gen_reg_rtx (mode
);
8771 expand_operands (treeop0
, treeop1
,
8772 target
, &op0
, &op1
, EXPAND_NORMAL
);
8774 /* First try to do it with a special MIN or MAX instruction.
8775 If that does not win, use a conditional jump to select the proper
8777 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
8778 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8783 /* At this point, a MEM target is no longer useful; we will get better
8786 if (! REG_P (target
))
8787 target
= gen_reg_rtx (mode
);
8789 /* If op1 was placed in target, swap op0 and op1. */
8790 if (target
!= op0
&& target
== op1
)
8797 /* We generate better code and avoid problems with op1 mentioning
8798 target by forcing op1 into a pseudo if it isn't a constant. */
8799 if (! CONSTANT_P (op1
))
8800 op1
= force_reg (mode
, op1
);
8803 enum rtx_code comparison_code
;
8806 if (code
== MAX_EXPR
)
8807 comparison_code
= unsignedp
? GEU
: GE
;
8809 comparison_code
= unsignedp
? LEU
: LE
;
8811 /* Canonicalize to comparisons against 0. */
8812 if (op1
== const1_rtx
)
8814 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8815 or (a != 0 ? a : 1) for unsigned.
8816 For MIN we are safe converting (a <= 1 ? a : 1)
8817 into (a <= 0 ? a : 1) */
8818 cmpop1
= const0_rtx
;
8819 if (code
== MAX_EXPR
)
8820 comparison_code
= unsignedp
? NE
: GT
;
8822 if (op1
== constm1_rtx
&& !unsignedp
)
8824 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8825 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8826 cmpop1
= const0_rtx
;
8827 if (code
== MIN_EXPR
)
8828 comparison_code
= LT
;
8830 #ifdef HAVE_conditional_move
8831 /* Use a conditional move if possible. */
8832 if (can_conditionally_move_p (mode
))
8838 /* Try to emit the conditional move. */
8839 insn
= emit_conditional_move (target
, comparison_code
,
8844 /* If we could do the conditional move, emit the sequence,
8848 rtx_insn
*seq
= get_insns ();
8854 /* Otherwise discard the sequence and fall back to code with
8860 emit_move_insn (target
, op0
);
8862 temp
= gen_label_rtx ();
8863 do_compare_rtx_and_jump (target
, cmpop1
, comparison_code
,
8864 unsignedp
, mode
, NULL_RTX
, NULL_RTX
, temp
,
8867 emit_move_insn (target
, op1
);
8872 op0
= expand_expr (treeop0
, subtarget
,
8873 VOIDmode
, EXPAND_NORMAL
);
8874 if (modifier
== EXPAND_STACK_PARM
)
8876 /* In case we have to reduce the result to bitfield precision
8877 for unsigned bitfield expand this as XOR with a proper constant
8879 if (reduce_bit_field
&& TYPE_UNSIGNED (type
))
8881 wide_int mask
= wi::mask (TYPE_PRECISION (type
),
8882 false, GET_MODE_PRECISION (mode
));
8884 temp
= expand_binop (mode
, xor_optab
, op0
,
8885 immed_wide_int_const (mask
, mode
),
8886 target
, 1, OPTAB_LIB_WIDEN
);
8889 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8893 /* ??? Can optimize bitwise operations with one arg constant.
8894 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8895 and (a bitwise1 b) bitwise2 b (etc)
8896 but that is probably not worth while. */
8905 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type
))
8906 || (GET_MODE_PRECISION (TYPE_MODE (type
))
8907 == TYPE_PRECISION (type
)));
8912 /* If this is a fixed-point operation, then we cannot use the code
8913 below because "expand_shift" doesn't support sat/no-sat fixed-point
8915 if (ALL_FIXED_POINT_MODE_P (mode
))
8918 if (! safe_from_p (subtarget
, treeop1
, 1))
8920 if (modifier
== EXPAND_STACK_PARM
)
8922 op0
= expand_expr (treeop0
, subtarget
,
8923 VOIDmode
, EXPAND_NORMAL
);
8924 temp
= expand_variable_shift (code
, mode
, op0
, treeop1
, target
,
8926 if (code
== LSHIFT_EXPR
)
8927 temp
= REDUCE_BIT_FIELD (temp
);
8930 /* Could determine the answer when only additive constants differ. Also,
8931 the addition of one can be handled by changing the condition. */
8938 case UNORDERED_EXPR
:
8946 temp
= do_store_flag (ops
,
8947 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8948 tmode
!= VOIDmode
? tmode
: mode
);
8952 /* Use a compare and a jump for BLKmode comparisons, or for function
8953 type comparisons is HAVE_canonicalize_funcptr_for_compare. */
8956 || modifier
== EXPAND_STACK_PARM
8957 || ! safe_from_p (target
, treeop0
, 1)
8958 || ! safe_from_p (target
, treeop1
, 1)
8959 /* Make sure we don't have a hard reg (such as function's return
8960 value) live across basic blocks, if not optimizing. */
8961 || (!optimize
&& REG_P (target
)
8962 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8963 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8965 emit_move_insn (target
, const0_rtx
);
8967 op1
= gen_label_rtx ();
8968 jumpifnot_1 (code
, treeop0
, treeop1
, op1
, -1);
8970 if (TYPE_PRECISION (type
) == 1 && !TYPE_UNSIGNED (type
))
8971 emit_move_insn (target
, constm1_rtx
);
8973 emit_move_insn (target
, const1_rtx
);
8979 /* Get the rtx code of the operands. */
8980 op0
= expand_normal (treeop0
);
8981 op1
= expand_normal (treeop1
);
8984 target
= gen_reg_rtx (TYPE_MODE (type
));
8986 /* If target overlaps with op1, then either we need to force
8987 op1 into a pseudo (if target also overlaps with op0),
8988 or write the complex parts in reverse order. */
8989 switch (GET_CODE (target
))
8992 if (reg_overlap_mentioned_p (XEXP (target
, 0), op1
))
8994 if (reg_overlap_mentioned_p (XEXP (target
, 1), op0
))
8996 complex_expr_force_op1
:
8997 temp
= gen_reg_rtx (GET_MODE_INNER (GET_MODE (target
)));
8998 emit_move_insn (temp
, op1
);
9002 complex_expr_swap_order
:
9003 /* Move the imaginary (op1) and real (op0) parts to their
9005 write_complex_part (target
, op1
, true);
9006 write_complex_part (target
, op0
, false);
9012 temp
= adjust_address_nv (target
,
9013 GET_MODE_INNER (GET_MODE (target
)), 0);
9014 if (reg_overlap_mentioned_p (temp
, op1
))
9016 machine_mode imode
= GET_MODE_INNER (GET_MODE (target
));
9017 temp
= adjust_address_nv (target
, imode
,
9018 GET_MODE_SIZE (imode
));
9019 if (reg_overlap_mentioned_p (temp
, op0
))
9020 goto complex_expr_force_op1
;
9021 goto complex_expr_swap_order
;
9025 if (reg_overlap_mentioned_p (target
, op1
))
9027 if (reg_overlap_mentioned_p (target
, op0
))
9028 goto complex_expr_force_op1
;
9029 goto complex_expr_swap_order
;
9034 /* Move the real (op0) and imaginary (op1) parts to their location. */
9035 write_complex_part (target
, op0
, false);
9036 write_complex_part (target
, op1
, true);
9040 case WIDEN_SUM_EXPR
:
9042 tree oprnd0
= treeop0
;
9043 tree oprnd1
= treeop1
;
9045 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9046 target
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, op1
,
9051 case REDUC_MAX_EXPR
:
9052 case REDUC_MIN_EXPR
:
9053 case REDUC_PLUS_EXPR
:
9055 op0
= expand_normal (treeop0
);
9056 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9057 machine_mode vec_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9059 if (optab_handler (this_optab
, vec_mode
) != CODE_FOR_nothing
)
9061 struct expand_operand ops
[2];
9062 enum insn_code icode
= optab_handler (this_optab
, vec_mode
);
9064 create_output_operand (&ops
[0], target
, mode
);
9065 create_input_operand (&ops
[1], op0
, vec_mode
);
9066 if (maybe_expand_insn (icode
, 2, ops
))
9068 target
= ops
[0].value
;
9069 if (GET_MODE (target
) != mode
)
9070 return gen_lowpart (tmode
, target
);
9074 /* Fall back to optab with vector result, and then extract scalar. */
9075 this_optab
= scalar_reduc_to_vector (this_optab
, type
);
9076 temp
= expand_unop (vec_mode
, this_optab
, op0
, NULL_RTX
, unsignedp
);
9078 /* The tree code produces a scalar result, but (somewhat by convention)
9079 the optab produces a vector with the result in element 0 if
9080 little-endian, or element N-1 if big-endian. So pull the scalar
9081 result out of that element. */
9082 int index
= BYTES_BIG_ENDIAN
? GET_MODE_NUNITS (vec_mode
) - 1 : 0;
9083 int bitsize
= GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode
));
9084 temp
= extract_bit_field (temp
, bitsize
, bitsize
* index
, unsignedp
,
9085 target
, mode
, mode
);
9090 case VEC_RSHIFT_EXPR
:
9092 target
= expand_vec_shift_expr (ops
, target
);
9096 case VEC_UNPACK_HI_EXPR
:
9097 case VEC_UNPACK_LO_EXPR
:
9099 op0
= expand_normal (treeop0
);
9100 temp
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, NULL_RTX
,
9106 case VEC_UNPACK_FLOAT_HI_EXPR
:
9107 case VEC_UNPACK_FLOAT_LO_EXPR
:
9109 op0
= expand_normal (treeop0
);
9110 /* The signedness is determined from input operand. */
9111 temp
= expand_widen_pattern_expr
9112 (ops
, op0
, NULL_RTX
, NULL_RTX
,
9113 target
, TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
9119 case VEC_WIDEN_MULT_HI_EXPR
:
9120 case VEC_WIDEN_MULT_LO_EXPR
:
9121 case VEC_WIDEN_MULT_EVEN_EXPR
:
9122 case VEC_WIDEN_MULT_ODD_EXPR
:
9123 case VEC_WIDEN_LSHIFT_HI_EXPR
:
9124 case VEC_WIDEN_LSHIFT_LO_EXPR
:
9125 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9126 target
= expand_widen_pattern_expr (ops
, op0
, op1
, NULL_RTX
,
9128 gcc_assert (target
);
9131 case VEC_PACK_TRUNC_EXPR
:
9132 case VEC_PACK_SAT_EXPR
:
9133 case VEC_PACK_FIX_TRUNC_EXPR
:
9134 mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9138 expand_operands (treeop0
, treeop1
, target
, &op0
, &op1
, EXPAND_NORMAL
);
9139 op2
= expand_normal (treeop2
);
9141 /* Careful here: if the target doesn't support integral vector modes,
9142 a constant selection vector could wind up smooshed into a normal
9143 integral constant. */
9144 if (CONSTANT_P (op2
) && GET_CODE (op2
) != CONST_VECTOR
)
9146 tree sel_type
= TREE_TYPE (treeop2
);
9148 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type
)),
9149 TYPE_VECTOR_SUBPARTS (sel_type
));
9150 gcc_assert (GET_MODE_CLASS (vmode
) == MODE_VECTOR_INT
);
9151 op2
= simplify_subreg (vmode
, op2
, TYPE_MODE (sel_type
), 0);
9152 gcc_assert (op2
&& GET_CODE (op2
) == CONST_VECTOR
);
9155 gcc_assert (GET_MODE_CLASS (GET_MODE (op2
)) == MODE_VECTOR_INT
);
9157 temp
= expand_vec_perm (mode
, op0
, op1
, op2
, target
);
9163 tree oprnd0
= treeop0
;
9164 tree oprnd1
= treeop1
;
9165 tree oprnd2
= treeop2
;
9168 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9169 op2
= expand_normal (oprnd2
);
9170 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9177 tree oprnd0
= treeop0
;
9178 tree oprnd1
= treeop1
;
9179 tree oprnd2
= treeop2
;
9182 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9183 op2
= expand_normal (oprnd2
);
9184 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9189 case REALIGN_LOAD_EXPR
:
9191 tree oprnd0
= treeop0
;
9192 tree oprnd1
= treeop1
;
9193 tree oprnd2
= treeop2
;
9196 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9197 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9198 op2
= expand_normal (oprnd2
);
9199 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
9206 /* A COND_EXPR with its type being VOID_TYPE represents a
9207 conditional jump and is handled in
9208 expand_gimple_cond_expr. */
9209 gcc_assert (!VOID_TYPE_P (type
));
9211 /* Note that COND_EXPRs whose type is a structure or union
9212 are required to be constructed to contain assignments of
9213 a temporary variable, so that we can evaluate them here
9214 for side effect only. If type is void, we must do likewise. */
9216 gcc_assert (!TREE_ADDRESSABLE (type
)
9218 && TREE_TYPE (treeop1
) != void_type_node
9219 && TREE_TYPE (treeop2
) != void_type_node
);
9221 temp
= expand_cond_expr_using_cmove (treeop0
, treeop1
, treeop2
);
9225 /* If we are not to produce a result, we have no target. Otherwise,
9226 if a target was specified use it; it will not be used as an
9227 intermediate target unless it is safe. If no target, use a
9230 if (modifier
!= EXPAND_STACK_PARM
9232 && safe_from_p (original_target
, treeop0
, 1)
9233 && GET_MODE (original_target
) == mode
9234 && !MEM_P (original_target
))
9235 temp
= original_target
;
9237 temp
= assign_temp (type
, 0, 1);
9239 do_pending_stack_adjust ();
9241 op0
= gen_label_rtx ();
9242 op1
= gen_label_rtx ();
9243 jumpifnot (treeop0
, op0
, -1);
9244 store_expr (treeop1
, temp
,
9245 modifier
== EXPAND_STACK_PARM
,
9248 emit_jump_insn (gen_jump (op1
));
9251 store_expr (treeop2
, temp
,
9252 modifier
== EXPAND_STACK_PARM
,
9260 target
= expand_vec_cond_expr (type
, treeop0
, treeop1
, treeop2
, target
);
9267 /* Here to do an ordinary binary operator. */
9269 expand_operands (treeop0
, treeop1
,
9270 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9272 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9274 if (modifier
== EXPAND_STACK_PARM
)
9276 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9277 unsignedp
, OPTAB_LIB_WIDEN
);
9279 /* Bitwise operations do not need bitfield reduction as we expect their
9280 operands being properly truncated. */
9281 if (code
== BIT_XOR_EXPR
9282 || code
== BIT_AND_EXPR
9283 || code
== BIT_IOR_EXPR
)
9285 return REDUCE_BIT_FIELD (temp
);
9287 #undef REDUCE_BIT_FIELD
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }

  return false;
}
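/* (Note: this predicate is consulted below when expanding an SSA_NAME under
   EXPAND_INITIALIZER, to decide whether the defining statement may be
   expanded in place of the name itself.)  */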
rtx
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
	default:
	case 3: treeop2 = TREE_OPERAND (exp, 2);
	case 2: treeop1 = TREE_OPERAND (exp, 1);
	case 1: treeop0 = TREE_OPERAND (exp, 0);
	case 0: break;
      }
9348 ignore
= (target
== const0_rtx
9349 || ((CONVERT_EXPR_CODE_P (code
)
9350 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
9351 && TREE_CODE (type
) == VOID_TYPE
));
9353 /* An operation in what may be a bit-field type needs the
9354 result to be reduced to the precision of the bit-field type,
9355 which is narrower than that of the type's mode. */
9356 reduce_bit_field
= (!ignore
9357 && INTEGRAL_TYPE_P (type
)
9358 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
9360 /* If we are going to ignore this result, we need only do something
9361 if there is a side-effect somewhere in the expression. If there
9362 is, short-circuit the most common cases here. Note that we must
9363 not call expand_expr with anything but const0_rtx in case this
9364 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9368 if (! TREE_SIDE_EFFECTS (exp
))
9371 /* Ensure we reference a volatile object even if value is ignored, but
9372 don't do this if all we are doing is taking its address. */
9373 if (TREE_THIS_VOLATILE (exp
)
9374 && TREE_CODE (exp
) != FUNCTION_DECL
9375 && mode
!= VOIDmode
&& mode
!= BLKmode
9376 && modifier
!= EXPAND_CONST_ADDRESS
)
9378 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
9384 if (TREE_CODE_CLASS (code
) == tcc_unary
9385 || code
== BIT_FIELD_REF
9386 || code
== COMPONENT_REF
9387 || code
== INDIRECT_REF
)
9388 return expand_expr (treeop0
, const0_rtx
, VOIDmode
,
9391 else if (TREE_CODE_CLASS (code
) == tcc_binary
9392 || TREE_CODE_CLASS (code
) == tcc_comparison
9393 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
9395 expand_expr (treeop0
, const0_rtx
, VOIDmode
, modifier
);
9396 expand_expr (treeop1
, const0_rtx
, VOIDmode
, modifier
);
9403 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
9406 /* Use subtarget as the target for operand 0 of a binary operation. */
9407 subtarget
= get_subtarget (target
);
9408 original_target
= target
;
9414 tree function
= decl_function_context (exp
);
9416 temp
= label_rtx (exp
);
9417 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
9419 if (function
!= current_function_decl
9421 LABEL_REF_NONLOCAL_P (temp
) = 1;
9423 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
9428 /* ??? ivopts calls expander, without any preparation from
9429 out-of-ssa. So fake instructions as if this was an access to the
9430 base variable. This unnecessarily allocates a pseudo, see how we can
9431 reuse it, if partition base vars have it set already. */
9432 if (!currently_expanding_to_rtl
)
9434 tree var
= SSA_NAME_VAR (exp
);
9435 if (var
&& DECL_RTL_SET_P (var
))
9436 return DECL_RTL (var
);
9437 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp
)),
9438 LAST_VIRTUAL_REGISTER
+ 1);
9441 g
= get_gimple_for_ssa_name (exp
);
9442 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9444 && modifier
== EXPAND_INITIALIZER
9445 && !SSA_NAME_IS_DEFAULT_DEF (exp
)
9446 && (optimize
|| DECL_IGNORED_P (SSA_NAME_VAR (exp
)))
9447 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp
)))
9448 g
= SSA_NAME_DEF_STMT (exp
);
9452 ops
.code
= gimple_assign_rhs_code (g
);
9453 switch (get_gimple_rhs_class (ops
.code
))
9455 case GIMPLE_TERNARY_RHS
:
9456 ops
.op2
= gimple_assign_rhs3 (g
);
9458 case GIMPLE_BINARY_RHS
:
9459 ops
.op1
= gimple_assign_rhs2 (g
);
9461 case GIMPLE_UNARY_RHS
:
9462 ops
.op0
= gimple_assign_rhs1 (g
);
9463 ops
.type
= TREE_TYPE (gimple_assign_lhs (g
));
9464 ops
.location
= gimple_location (g
);
9465 r
= expand_expr_real_2 (&ops
, target
, tmode
, modifier
);
9467 case GIMPLE_SINGLE_RHS
:
9469 location_t saved_loc
= curr_insn_location ();
9470 set_curr_insn_location (gimple_location (g
));
9471 r
= expand_expr_real (gimple_assign_rhs1 (g
), target
,
9472 tmode
, modifier
, NULL
, inner_reference_p
);
9473 set_curr_insn_location (saved_loc
);
9479 if (REG_P (r
) && !REG_EXPR (r
))
9480 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp
), r
);
9485 decl_rtl
= get_rtx_for_ssa_name (ssa_name
);
9486 exp
= SSA_NAME_VAR (ssa_name
);
9487 goto expand_decl_rtl
;
9491 /* If a static var's type was incomplete when the decl was written,
9492 but the type is complete now, lay out the decl now. */
9493 if (DECL_SIZE (exp
) == 0
9494 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
9495 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
9496 layout_decl (exp
, 0);
9498 /* ... fall through ... */
9502 decl_rtl
= DECL_RTL (exp
);
9504 gcc_assert (decl_rtl
);
9505 decl_rtl
= copy_rtx (decl_rtl
);
9506 /* Record writes to register variables. */
9507 if (modifier
== EXPAND_WRITE
9509 && HARD_REGISTER_P (decl_rtl
))
9510 add_to_hard_reg_set (&crtl
->asm_clobbers
,
9511 GET_MODE (decl_rtl
), REGNO (decl_rtl
));
9513 /* Ensure variable marked as used even if it doesn't go through
9514 a parser. If it hasn't be used yet, write out an external
9516 TREE_USED (exp
) = 1;
9518 /* Show we haven't gotten RTL for this yet. */
9521 /* Variables inherited from containing functions should have
9522 been lowered by this point. */
9523 context
= decl_function_context (exp
);
9524 gcc_assert (SCOPE_FILE_SCOPE_P (context
)
9525 || context
== current_function_decl
9526 || TREE_STATIC (exp
)
9527 || DECL_EXTERNAL (exp
)
9528 /* ??? C++ creates functions that are not TREE_STATIC. */
9529 || TREE_CODE (exp
) == FUNCTION_DECL
);
9531 /* This is the case of an array whose size is to be determined
9532 from its initializer, while the initializer is still being parsed.
9533 ??? We aren't parsing while expanding anymore. */
9535 if (MEM_P (decl_rtl
) && REG_P (XEXP (decl_rtl
, 0)))
9536 temp
= validize_mem (decl_rtl
);
9538 /* If DECL_RTL is memory, we are in the normal case and the
9539 address is not valid, get the address into a register. */
9541 else if (MEM_P (decl_rtl
) && modifier
!= EXPAND_INITIALIZER
)
9544 *alt_rtl
= decl_rtl
;
9545 decl_rtl
= use_anchored_address (decl_rtl
);
9546 if (modifier
!= EXPAND_CONST_ADDRESS
9547 && modifier
!= EXPAND_SUM
9548 && !memory_address_addr_space_p (DECL_MODE (exp
),
9550 MEM_ADDR_SPACE (decl_rtl
)))
9551 temp
= replace_equiv_address (decl_rtl
,
9552 copy_rtx (XEXP (decl_rtl
, 0)));
9555 /* If we got something, return it. But first, set the alignment
9556 if the address is a register. */
9559 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
9560 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
9565 /* If the mode of DECL_RTL does not match that of the decl,
9566 there are two cases: we are dealing with a BLKmode value
9567 that is returned in a register, or we are dealing with
9568 a promoted value. In the latter case, return a SUBREG
9569 of the wanted mode, but mark it so that we know that it
9570 was already extended. */
9571 if (REG_P (decl_rtl
)
9572 && DECL_MODE (exp
) != BLKmode
9573 && GET_MODE (decl_rtl
) != DECL_MODE (exp
))
9577 /* Get the signedness to be used for this variable. Ensure we get
9578 the same mode we got when the variable was declared. */
9579 if (code
== SSA_NAME
9580 && (g
= SSA_NAME_DEF_STMT (ssa_name
))
9581 && gimple_code (g
) == GIMPLE_CALL
9582 && !gimple_call_internal_p (g
))
9583 pmode
= promote_function_mode (type
, mode
, &unsignedp
,
9584 gimple_call_fntype (g
),
9587 pmode
= promote_decl_mode (exp
, &unsignedp
);
9588 gcc_assert (GET_MODE (decl_rtl
) == pmode
);
9590 temp
= gen_lowpart_SUBREG (mode
, decl_rtl
);
9591 SUBREG_PROMOTED_VAR_P (temp
) = 1;
9592 SUBREG_PROMOTED_SET (temp
, unsignedp
);
9599 /* Given that TYPE_PRECISION (type) is not always equal to
9600 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9601 the former to the latter according to the signedness of the
9603 temp
= immed_wide_int_const (wide_int::from
9605 GET_MODE_PRECISION (TYPE_MODE (type
)),
9612 tree tmp
= NULL_TREE
;
9613 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
9614 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
9615 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
9616 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
9617 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
9618 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
9619 return const_vector_from_tree (exp
);
9620 if (GET_MODE_CLASS (mode
) == MODE_INT
)
9622 tree type_for_mode
= lang_hooks
.types
.type_for_mode (mode
, 1);
9624 tmp
= fold_unary_loc (loc
, VIEW_CONVERT_EXPR
, type_for_mode
, exp
);
9628 vec
<constructor_elt
, va_gc
> *v
;
9630 vec_alloc (v
, VECTOR_CST_NELTS (exp
));
9631 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
9632 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, VECTOR_CST_ELT (exp
, i
));
9633 tmp
= build_constructor (type
, v
);
9635 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
9640 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
9643 /* If optimized, generate immediate CONST_DOUBLE
9644 which will be turned into memory by reload if necessary.
9646 We used to force a register so that loop.c could see it. But
9647 this does not allow gen_* patterns to perform optimizations with
9648 the constants. It also produces two insns in cases like "x = 1.0;".
9649 On most machines, floating-point constants are not permitted in
9650 many insns, so we'd end up copying it to a register in any case.
9652 Now, we do the copying in expand_binop, if appropriate. */
9653 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
9654 TYPE_MODE (TREE_TYPE (exp
)));
9657 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
9658 TYPE_MODE (TREE_TYPE (exp
)));
9661 /* Handle evaluating a complex constant in a CONCAT target. */
9662 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
9664 machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9667 rtarg
= XEXP (original_target
, 0);
9668 itarg
= XEXP (original_target
, 1);
9670 /* Move the real and imaginary parts separately. */
9671 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
9672 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
9675 emit_move_insn (rtarg
, op0
);
9677 emit_move_insn (itarg
, op1
);
9679 return original_target
;
9682 /* ... fall through ... */
9685 temp
= expand_expr_constant (exp
, 1, modifier
);
9687 /* temp contains a constant address.
9688 On RISC machines where a constant address isn't valid,
9689 make some insns to get that address into a register. */
9690 if (modifier
!= EXPAND_CONST_ADDRESS
9691 && modifier
!= EXPAND_INITIALIZER
9692 && modifier
!= EXPAND_SUM
9693 && ! memory_address_addr_space_p (mode
, XEXP (temp
, 0),
9694 MEM_ADDR_SPACE (temp
)))
9695 return replace_equiv_address (temp
,
9696 copy_rtx (XEXP (temp
, 0)));
9702 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
,
9705 if (!SAVE_EXPR_RESOLVED_P (exp
))
9707 /* We can indeed still hit this case, typically via builtin
9708 expanders calling save_expr immediately before expanding
9709 something. Assume this means that we only have to deal
9710 with non-BLKmode values. */
9711 gcc_assert (GET_MODE (ret
) != BLKmode
);
9713 val
= build_decl (curr_insn_location (),
9714 VAR_DECL
, NULL
, TREE_TYPE (exp
));
9715 DECL_ARTIFICIAL (val
) = 1;
9716 DECL_IGNORED_P (val
) = 1;
9718 TREE_OPERAND (exp
, 0) = treeop0
;
9719 SAVE_EXPR_RESOLVED_P (exp
) = 1;
9721 if (!CONSTANT_P (ret
))
9722 ret
= copy_to_reg (ret
);
9723 SET_DECL_RTL (val
, ret
);
9731 /* If we don't need the result, just ensure we evaluate any
9735 unsigned HOST_WIDE_INT idx
;
9738 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
9739 expand_expr (value
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
9744 return expand_constructor (exp
, target
, modifier
, false);
9746 case TARGET_MEM_REF
:
9749 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
9750 enum insn_code icode
;
9753 op0
= addr_for_mem_ref (exp
, as
, true);
9754 op0
= memory_address_addr_space (mode
, op0
, as
);
9755 temp
= gen_rtx_MEM (mode
, op0
);
9756 set_mem_attributes (temp
, exp
, 0);
9757 set_mem_addr_space (temp
, as
);
9758 align
= get_object_alignment (exp
);
9759 if (modifier
!= EXPAND_WRITE
9760 && modifier
!= EXPAND_MEMORY
9762 && align
< GET_MODE_ALIGNMENT (mode
)
9763 /* If the target does not have special handling for unaligned
9764 loads of mode then it can use regular moves for them. */
9765 && ((icode
= optab_handler (movmisalign_optab
, mode
))
9766 != CODE_FOR_nothing
))
9768 struct expand_operand ops
[2];
9770 /* We've already validated the memory, and we're creating a
9771 new pseudo destination. The predicates really can't fail,
9772 nor can the generator. */
9773 create_output_operand (&ops
[0], NULL_RTX
, mode
);
9774 create_fixed_operand (&ops
[1], temp
);
9775 expand_insn (icode
, 2, ops
);
9776 temp
= ops
[0].value
;
9784 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
9785 machine_mode address_mode
;
9786 tree base
= TREE_OPERAND (exp
, 0);
9788 enum insn_code icode
;
9790 /* Handle expansion of non-aliased memory with non-BLKmode. That
9791 might end up in a register. */
9792 if (mem_ref_refers_to_non_mem_p (exp
))
9794 HOST_WIDE_INT offset
= mem_ref_offset (exp
).to_short_addr ();
9795 base
= TREE_OPERAND (base
, 0);
9797 && tree_fits_uhwi_p (TYPE_SIZE (type
))
9798 && (GET_MODE_BITSIZE (DECL_MODE (base
))
9799 == tree_to_uhwi (TYPE_SIZE (type
))))
9800 return expand_expr (build1 (VIEW_CONVERT_EXPR
, type
, base
),
9801 target
, tmode
, modifier
);
9802 if (TYPE_MODE (type
) == BLKmode
)
9804 temp
= assign_stack_temp (DECL_MODE (base
),
9805 GET_MODE_SIZE (DECL_MODE (base
)));
9806 store_expr (base
, temp
, 0, false);
9807 temp
= adjust_address (temp
, BLKmode
, offset
);
9808 set_mem_size (temp
, int_size_in_bytes (type
));
9811 exp
= build3 (BIT_FIELD_REF
, type
, base
, TYPE_SIZE (type
),
9812 bitsize_int (offset
* BITS_PER_UNIT
));
9813 return expand_expr (exp
, target
, tmode
, modifier
);
9815 address_mode
= targetm
.addr_space
.address_mode (as
);
9816 base
= TREE_OPERAND (exp
, 0);
9817 if ((def_stmt
= get_def_for_expr (base
, BIT_AND_EXPR
)))
9819 tree mask
= gimple_assign_rhs2 (def_stmt
);
9820 base
= build2 (BIT_AND_EXPR
, TREE_TYPE (base
),
9821 gimple_assign_rhs1 (def_stmt
), mask
);
9822 TREE_OPERAND (exp
, 0) = base
;
9824 align
= get_object_alignment (exp
);
9825 op0
= expand_expr (base
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
9826 op0
= memory_address_addr_space (mode
, op0
, as
);
9827 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
9829 rtx off
= immed_wide_int_const (mem_ref_offset (exp
), address_mode
);
9830 op0
= simplify_gen_binary (PLUS
, address_mode
, op0
, off
);
9831 op0
= memory_address_addr_space (mode
, op0
, as
);
9833 temp
= gen_rtx_MEM (mode
, op0
);
9834 set_mem_attributes (temp
, exp
, 0);
9835 set_mem_addr_space (temp
, as
);
9836 if (TREE_THIS_VOLATILE (exp
))
9837 MEM_VOLATILE_P (temp
) = 1;
9838 if (modifier
!= EXPAND_WRITE
9839 && modifier
!= EXPAND_MEMORY
9840 && !inner_reference_p
9842 && align
< GET_MODE_ALIGNMENT (mode
))
9844 if ((icode
= optab_handler (movmisalign_optab
, mode
))
9845 != CODE_FOR_nothing
)
9847 struct expand_operand ops
[2];
9849 /* We've already validated the memory, and we're creating a
9850 new pseudo destination. The predicates really can't fail,
9851 nor can the generator. */
9852 create_output_operand (&ops
[0], NULL_RTX
, mode
);
9853 create_fixed_operand (&ops
[1], temp
);
9854 expand_insn (icode
, 2, ops
);
9855 temp
= ops
[0].value
;
9857 else if (SLOW_UNALIGNED_ACCESS (mode
, align
))
9858 temp
= extract_bit_field (temp
, GET_MODE_BITSIZE (mode
),
9859 0, TYPE_UNSIGNED (TREE_TYPE (exp
)),
9860 (modifier
== EXPAND_STACK_PARM
9861 ? NULL_RTX
: target
),
9870 tree array
= treeop0
;
9871 tree index
= treeop1
;
9874 /* Fold an expression like: "foo"[2].
9875 This is not done in fold so it won't happen inside &.
9876 Don't fold if this is for wide characters since it's too
9877 difficult to do correctly and this is a very rare case. */
9879 if (modifier
!= EXPAND_CONST_ADDRESS
9880 && modifier
!= EXPAND_INITIALIZER
9881 && modifier
!= EXPAND_MEMORY
)
9883 tree t
= fold_read_from_constant_string (exp
);
9886 return expand_expr (t
, target
, tmode
, modifier
);
9889 /* If this is a constant index into a constant array,
9890 just get the value from the array. Handle both the cases when
9891 we have an explicit constructor and when our operand is a variable
9892 that was declared const. */
9894 if (modifier
!= EXPAND_CONST_ADDRESS
9895 && modifier
!= EXPAND_INITIALIZER
9896 && modifier
!= EXPAND_MEMORY
9897 && TREE_CODE (array
) == CONSTRUCTOR
9898 && ! TREE_SIDE_EFFECTS (array
)
9899 && TREE_CODE (index
) == INTEGER_CST
)
9901 unsigned HOST_WIDE_INT ix
;
9904 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
9906 if (tree_int_cst_equal (field
, index
))
9908 if (!TREE_SIDE_EFFECTS (value
))
9909 return expand_expr (fold (value
), target
, tmode
, modifier
);
9914 else if (optimize
>= 1
9915 && modifier
!= EXPAND_CONST_ADDRESS
9916 && modifier
!= EXPAND_INITIALIZER
9917 && modifier
!= EXPAND_MEMORY
9918 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
9919 && TREE_CODE (index
) == INTEGER_CST
9920 && (TREE_CODE (array
) == VAR_DECL
9921 || TREE_CODE (array
) == CONST_DECL
)
9922 && (init
= ctor_for_folding (array
)) != error_mark_node
)
9924 if (init
== NULL_TREE
)
9926 tree value
= build_zero_cst (type
);
9927 if (TREE_CODE (value
) == CONSTRUCTOR
)
9929 /* If VALUE is a CONSTRUCTOR, this optimization is only
9930 useful if this doesn't store the CONSTRUCTOR into
9931 memory. If it does, it is more efficient to just
9932 load the data from the array directly. */
9933 rtx ret
= expand_constructor (value
, target
,
9935 if (ret
== NULL_RTX
)
9940 return expand_expr (value
, target
, tmode
, modifier
);
9942 else if (TREE_CODE (init
) == CONSTRUCTOR
)
9944 unsigned HOST_WIDE_INT ix
;
9947 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
9949 if (tree_int_cst_equal (field
, index
))
9951 if (TREE_SIDE_EFFECTS (value
))
9954 if (TREE_CODE (value
) == CONSTRUCTOR
)
9956 /* If VALUE is a CONSTRUCTOR, this
9957 optimization is only useful if
9958 this doesn't store the CONSTRUCTOR
9959 into memory. If it does, it is more
9960 efficient to just load the data from
9961 the array directly. */
9962 rtx ret
= expand_constructor (value
, target
,
9964 if (ret
== NULL_RTX
)
9969 expand_expr (fold (value
), target
, tmode
, modifier
);
9972 else if (TREE_CODE (init
) == STRING_CST
)
9974 tree low_bound
= array_ref_low_bound (exp
);
9975 tree index1
= fold_convert_loc (loc
, sizetype
, treeop1
);
9977 /* Optimize the special case of a zero lower bound.
9979 We convert the lower bound to sizetype to avoid problems
9980 with constant folding. E.g. suppose the lower bound is
9981 1 and its mode is QI. Without the conversion
9982 (ARRAY + (INDEX - (unsigned char)1))
9984 (ARRAY + (-(unsigned char)1) + INDEX)
9986 (ARRAY + 255 + INDEX). Oops! */
9987 if (!integer_zerop (low_bound
))
9988 index1
= size_diffop_loc (loc
, index1
,
9989 fold_convert_loc (loc
, sizetype
,
9992 if (compare_tree_int (index1
, TREE_STRING_LENGTH (init
)) < 0)
9994 tree type
= TREE_TYPE (TREE_TYPE (init
));
9995 machine_mode mode
= TYPE_MODE (type
);
9997 if (GET_MODE_CLASS (mode
) == MODE_INT
9998 && GET_MODE_SIZE (mode
) == 1)
9999 return gen_int_mode (TREE_STRING_POINTER (init
)
10000 [TREE_INT_CST_LOW (index1
)],
10006 goto normal_inner_ref
;
10008 case COMPONENT_REF
:
10009 /* If the operand is a CONSTRUCTOR, we can just extract the
10010 appropriate field if it is present. */
10011 if (TREE_CODE (treeop0
) == CONSTRUCTOR
)
10013 unsigned HOST_WIDE_INT idx
;
10016 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0
),
10018 if (field
== treeop1
10019 /* We can normally use the value of the field in the
10020 CONSTRUCTOR. However, if this is a bitfield in
10021 an integral mode that we can fit in a HOST_WIDE_INT,
10022 we must mask only the number of bits in the bitfield,
10023 since this is done implicitly by the constructor. If
10024 the bitfield does not meet either of those conditions,
10025 we can't do this optimization. */
10026 && (! DECL_BIT_FIELD (field
)
10027 || ((GET_MODE_CLASS (DECL_MODE (field
)) == MODE_INT
)
10028 && (GET_MODE_PRECISION (DECL_MODE (field
))
10029 <= HOST_BITS_PER_WIDE_INT
))))
10031 if (DECL_BIT_FIELD (field
)
10032 && modifier
== EXPAND_STACK_PARM
)
10034 op0
= expand_expr (value
, target
, tmode
, modifier
);
10035 if (DECL_BIT_FIELD (field
))
10037 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
10038 machine_mode imode
= TYPE_MODE (TREE_TYPE (field
));
10040 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
10042 op1
= gen_int_mode (((HOST_WIDE_INT
) 1 << bitsize
) - 1,
10044 op0
= expand_and (imode
, op0
, op1
, target
);
10048 int count
= GET_MODE_PRECISION (imode
) - bitsize
;
10050 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
10052 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
10060 goto normal_inner_ref
;
10062 case BIT_FIELD_REF
:
10063 case ARRAY_RANGE_REF
:
10066 machine_mode mode1
, mode2
;
10067 HOST_WIDE_INT bitsize
, bitpos
;
10069 int volatilep
= 0, must_force_mem
;
10070 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
10071 &mode1
, &unsignedp
, &volatilep
, true);
10072 rtx orig_op0
, memloc
;
10073 bool mem_attrs_from_type
= false;
10075 /* If we got back the original object, something is wrong. Perhaps
10076 we are evaluating an expression too early. In any event, don't
10077 infinitely recurse. */
10078 gcc_assert (tem
!= exp
);
10080 /* If TEM's type is a union of variable size, pass TARGET to the inner
10081 computation, since it will need a temporary and TARGET is known
10082 to have to do. This occurs in unchecked conversion in Ada. */
10084 = expand_expr_real (tem
,
10085 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10086 && COMPLETE_TYPE_P (TREE_TYPE (tem
))
10087 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10089 && modifier
!= EXPAND_STACK_PARM
10090 ? target
: NULL_RTX
),
10092 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10095 /* If the field has a mode, we want to access it in the
10096 field's mode, not the computed mode.
10097 If a MEM has VOIDmode (external with incomplete type),
10098 use BLKmode for it instead. */
10101 if (mode1
!= VOIDmode
)
10102 op0
= adjust_address (op0
, mode1
, 0);
10103 else if (GET_MODE (op0
) == VOIDmode
)
10104 op0
= adjust_address (op0
, BLKmode
, 0);
10108 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
10110 /* If we have either an offset, a BLKmode result, or a reference
10111 outside the underlying object, we must force it to memory.
10112 Such a case can occur in Ada if we have unchecked conversion
10113 of an expression from a scalar type to an aggregate type or
10114 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10115 passed a partially uninitialized object or a view-conversion
10116 to a larger size. */
10117 must_force_mem
= (offset
10118 || mode1
== BLKmode
10119 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));
10121 /* Handle CONCAT first. */
10122 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
10125 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)))
10128 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10131 op0
= XEXP (op0
, 0);
10132 mode2
= GET_MODE (op0
);
10134 else if (bitpos
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10135 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1)))
10139 op0
= XEXP (op0
, 1);
10141 mode2
= GET_MODE (op0
);
10144 /* Otherwise force into memory. */
10145 must_force_mem
= 1;
        /* If this is a constant, put it in a register if it is a legitimate
           constant and we don't need a memory reference.  */
        if (CONSTANT_P (op0)
            && mode2 != BLKmode
            && targetm.legitimate_constant_p (mode2, op0)
            && !must_force_mem)
          op0 = force_reg (mode2, op0);

        /* Otherwise, if this is a constant, try to force it to the constant
           pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
           is a legitimate constant.  */
        else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
          op0 = validize_mem (memloc);

        /* Otherwise, if this is a constant or the object is not in memory
           and needs to be, put it there.  */
        else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
            memloc = assign_temp (TREE_TYPE (tem), 1, 1);
            emit_move_insn (memloc, op0);
            mem_attrs_from_type = true;
            machine_mode address_mode;
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,

            gcc_assert (MEM_P (op0));

            address_mode = get_address_mode (op0);
            if (GET_MODE (offset_rtx) != address_mode)
              offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

            /* See the comment in expand_assignment for the rationale.  */
            if (mode1 != VOIDmode
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));

        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
           record its alignment as BIGGEST_ALIGNMENT.  */
        if (MEM_P (op0) && bitpos == 0 && offset != 0
            && is_aligning_offset (offset, tem))
          set_mem_align (op0, BIGGEST_ALIGNMENT);

        /* Don't forget about volatility even if this is a bitfield.  */
        if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || REG_P (op0) || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER
                && modifier != EXPAND_MEMORY)
            /* If the bitfield is volatile and the bitsize
               is narrower than the access size of the bitfield,
               we need to extract bitfields from the access.  */
            || (volatilep && TREE_CODE (exp) == COMPONENT_REF
                && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
                && mode1 != BLKmode
                && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
                      && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
                          || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
                    && modifier != EXPAND_MEMORY
                    && ((modifier == EXPAND_CONST_ADDRESS
                         || modifier == EXPAND_INITIALIZER)
                        : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
                    || (bitpos % BITS_PER_UNIT != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
                && TYPE_SIZE (TREE_TYPE (exp))
                && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),

            machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && MEM_P (op0)
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
                  target = assign_temp (type, 1, 1);

                /* ??? Unlike the similar test a few lines below, this one is
                   very likely obsolete.  */

                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                gcc_assert (MEM_P (op0)
                            && (!target || MEM_P (target))
                            && !(bitpos % BITS_PER_UNIT));

                emit_block_move (target,
                                 adjust_address (op0, VOIDmode,
                                                 bitpos / BITS_PER_UNIT),
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

            /* If we have nothing to extract, the result will be 0 for targets
               with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
               return 0 for the sake of consistency, as reading a zero-sized
               bitfield is valid in Ada and the value is fully specified.  */

            op0 = validize_mem (op0);

            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
                                     (modifier == EXPAND_STACK_PARM
                                      ? NULL_RTX : target),
                                     ext_mode, ext_mode);
            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  GET_MODE_BITSIZE (GET_MODE (op0))
                                  - bitsize, op0, 1);
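            /* Illustrative note (not part of the original source): on a
               32-bit big-endian target, an 8-bit field extracted into an
               SImode register ends up in the low-order bits, so the shift
               above moves it left by GET_MODE_BITSIZE (SImode) - bitsize
               = 32 - 8 = 24 bits, placing it in the high-order bits where
               the first bytes of the in-memory record would be.  */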
            /* If the result type is BLKmode, store the data into a temporary
               of the appropriate type, but with the mode corresponding to the
               mode for the data we have (op0's mode).  */
            if (mode == BLKmode)
                  = assign_stack_temp_for_type (ext_mode,
                                                GET_MODE_BITSIZE (ext_mode),
                emit_move_insn (new_rtx, op0);
                op0 = copy_rtx (new_rtx);
                PUT_MODE (op0, BLKmode);

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        /* If op0 is a temporary because of forcing to memory, pass only the
           type to set_mem_attributes so that the original expression is never
           marked as ADDRESSABLE through MEM_EXPR of the temporary.  */
        if (mem_attrs_from_type)
          set_mem_attributes (op0, type, 0);
          set_mem_attributes (op0, exp, 0);

        if (REG_P (XEXP (op0, 0)))
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)

          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

        /* All valid uses of __builtin_va_arg_pack () are removed during
           inlining.  */
        if (CALL_EXPR_VA_ARG_PACK (exp))
          error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);

        tree fndecl = get_callee_fndecl (exp), attr;

            && (attr = lookup_attribute ("error",
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
          error ("%Kcall to %qs declared with attribute error: %s",
                 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
                 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
            && (attr = lookup_attribute ("warning",
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %qs declared with attribute warning: %s",
                      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
                      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

        /* Check for a built-in function.  */
        if (fndecl && DECL_BUILT_IN (fndecl))
            gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
            return expand_builtin (exp, target, subtarget, tmode, ignore);

      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:

      /* If we are converting to BLKmode, try to avoid an intermediate
         temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
          && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
          && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
          && handled_component_p (treeop0))
          machine_mode mode1;
          HOST_WIDE_INT bitsize, bitpos;

            = get_inner_reference (treeop0, &bitsize, &bitpos,
                                   &offset, &mode1, &unsignedp, &volatilep,

          /* ??? We should work harder and deal with non-zero offsets.  */
              && (bitpos % BITS_PER_UNIT) == 0
              && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)

              /* See the normal_inner_ref case for the rationale.  */
                = expand_expr_real (tem,
                                    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                                     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                     && modifier != EXPAND_STACK_PARM
                                     ? target : NULL_RTX),
                                    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,

              if (MEM_P (orig_op0))

                  /* Get a reference to just this component.  */
                  if (modifier == EXPAND_CONST_ADDRESS
                      || modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
                    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

                  if (op0 == orig_op0)
                    op0 = copy_rtx (op0);

                  set_mem_attributes (op0, treeop0, 0);
                  if (REG_P (XEXP (op0, 0)))
                    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

                  MEM_VOLATILE_P (op0) |= volatilep;

        op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
                                NULL, inner_reference_p);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
      /* If neither mode is BLKmode, and both modes are the same size
         then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
               && (GET_MODE_PRECISION (mode)
                   == GET_MODE_PRECISION (GET_MODE (op0)))
               && !COMPLEX_MODE_P (GET_MODE (op0)))
          if (GET_CODE (op0) == SUBREG)
            op0 = force_reg (GET_MODE (op0), op0);
          temp = gen_lowpart_common (mode, op0);
              if (!REG_P (op0) && !MEM_P (op0))
                op0 = force_reg (GET_MODE (op0), op0);
              op0 = gen_lowpart (mode, op0);
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
        op0 = convert_modes (mode, GET_MODE (op0), op0,
                             TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
        return extract_bit_field (op0, TYPE_PRECISION (type), 0,
                                  TYPE_UNSIGNED (type), NULL_RTX,
      /* As a last resort, spill op0 to memory, and reload it in a
         different mode.  */
      else if (!MEM_P (op0))
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (treeop0);

          gcc_assert (!TREE_ADDRESSABLE (exp));

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

          emit_move_insn (target, op0);

      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
         output type is such that the operand is known to be aligned, indicate
         that it is.  Otherwise, we need only be concerned about alignment for
         non-BLKmode results.  */
          enum insn_code icode;

          if (TYPE_ALIGN_OK (type))
              /* ??? Copying the MEM without substantially changing it might
                 run afoul of the code handling volatile memory references in
                 store_expr, which assumes that TARGET is returned unmodified
                 if it has been used.  */
              op0 = copy_rtx (op0);
              set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
          else if (modifier != EXPAND_WRITE
                   && modifier != EXPAND_MEMORY
                   && !inner_reference_p
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
              /* If the target has special handling for unaligned
                 loads of this mode, use it.  */
              if ((icode = optab_handler (movmisalign_optab, mode))
                  != CODE_FOR_nothing)
                  op0 = adjust_address (op0, mode, 0);
                  /* We've already validated the memory, and we're creating a
                     new pseudo destination.  The predicates really can't
                     fail.  */
                  reg = gen_reg_rtx (mode);

                  /* Nor can the insn generator.  */
                  insn = GEN_FCN (icode) (reg, op0);
              else if (STRICT_ALIGNMENT)
                  tree inner_type = TREE_TYPE (treeop0);
                  HOST_WIDE_INT temp_size
                    = MAX (int_size_in_bytes (inner_type),
                           (HOST_WIDE_INT) GET_MODE_SIZE (mode));
                    = assign_stack_temp_for_type (mode, temp_size, type);
                  rtx new_with_op0_mode
                    = adjust_address (new_rtx, GET_MODE (op0), 0);

                  gcc_assert (!TREE_ADDRESSABLE (exp));

                  if (GET_MODE (op0) == BLKmode)
                    emit_block_move (new_with_op0_mode, op0,
                                     GEN_INT (GET_MODE_SIZE (mode)),
                                     (modifier == EXPAND_STACK_PARM
                                      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
                    emit_move_insn (new_with_op0_mode, op0);

      op0 = adjust_address (op0, mode, 0);
        tree lhs = treeop0;
        tree rhs = treeop1;
        gcc_assert (ignore);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
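        /* Illustrative sketch (hypothetical source code, not taken from
           this file): for one-bit bitfields b.x and c.y, an assignment
           whose value is unused such as

             b.x |= c.y;

           can be expanded as

             if (c.y) b.x = 1;

           i.e. a conditional jump around a constant store, instead of
           reading b.x, computing the IOR and storing it back.  For &=
           the constant stored is 0 and the sense of the test is
           inverted, which is what VALUE below encodes.  */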
        if (TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
            rtx_code_label *label = gen_label_rtx ();
            int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
            do_jump (TREE_OPERAND (rhs, 1),
                     value ? 0 : label, -1);
            expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
            do_pending_stack_adjust ();
            emit_label (label);

        expand_assignment (lhs, rhs, false);

      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);

      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case CASE_LABEL_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

      /* Function descriptors are not valid except as
         initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
         have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
                               modifier, alt_rtl, inner_reference_p);

  return expand_expr_real_2 (&ops, target, tmode, modifier);
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))

  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
  else if (TYPE_UNSIGNED (type))
      machine_mode mode = GET_MODE (exp);
      rtx mask = immed_wide_int_const
        (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
                          exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
                           exp, count, target, 0);
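/* Worked example (illustration only, not part of the original source):
   reducing the SImode value 0x1f3 to a 4-bit field.  Unsigned: AND with
   the mask (1 << 4) - 1 = 0xf gives 0x3.  Signed: shift left by
   32 - 4 = 28 and arithmetic-shift right by 28, so the low 4 bits are
   sign-extended from bit 3; 0x1f3 still yields 3, while 0xf9 (low bits
   1001) would yield -7.  */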
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */
is_aligning_offset (const_tree offset, const_tree exp)
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
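/* Illustration (not part of the original source): the pattern accepted
   above corresponds to an offset of the form

     (- (sizetype) &exp) & (ALIGN - 1)

   where ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT in bytes;
   adding such an offset to &exp rounds the address up to the next
   ALIGN boundary, hence the stronger alignment guarantee.  */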
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
string_constant (tree arg, tree *ptr_offset)
  tree array, offset, lower_bound;

  if (TREE_CODE (arg) == ADDR_EXPR)
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
          *ptr_offset = size_zero_node;
          return TREE_OPERAND (arg, 0);
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
          array = TREE_OPERAND (arg, 0);
          offset = size_zero_node;
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)

          /* Check if the array has a nonzero lower bound.  */
          lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
          if (!integer_zerop (lower_bound))
              /* If the offset and base aren't both constants, return 0.  */
              if (TREE_CODE (lower_bound) != INTEGER_CST)
              if (TREE_CODE (offset) != INTEGER_CST)
              /* Adjust offset by the lower bound.  */
              offset = size_diffop (fold_convert (sizetype, offset),
                                    fold_convert (sizetype, lower_bound));
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != ADDR_EXPR)
          array = TREE_OPERAND (array, 0);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
          array = TREE_OPERAND (arg0, 0);
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
          array = TREE_OPERAND (arg1, 0);

  if (TREE_CODE (array) == STRING_CST)
      *ptr_offset = fold_convert (sizetype, offset);
  else if (TREE_CODE (array) == VAR_DECL
           || TREE_CODE (array) == CONST_DECL)
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
          || TREE_CODE (init) != STRING_CST)

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
          || (length = TREE_STRING_LENGTH (init)) <= 0
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)

      /* If the variable is bigger than the string literal, OFFSET must be
         constant and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
          && (! tree_fits_uhwi_p (offset)
              || compare_tree_int (offset, length) >= 0))

  *ptr_offset = offset;
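/* Usage illustration (hypothetical, not part of the original source):
   for an argument such as &"hello world"[6], or equivalently
   "hello world" + 6, the function returns the STRING_CST
   "hello world" and sets *PTR_OFFSET to 6.  A VAR_DECL whose
   initializer is a STRING_CST is accepted the same way, so callers can
   read the constant bytes directly.  */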
/* Generate code to calculate OPS, an exploded expression,
   using a store-flag instruction and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
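/* Illustration (not part of the original source): for a source
   comparison such as

     flag = (a < b);

   a successful store-flag expansion emits a single condition-set
   (cstore) instruction that writes 0 or 1 into FLAG, instead of the
   branching fallback

     flag = 0; if (a < b) flag = 1;

   which is the set/jump/set sequence mentioned above.  */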
do_store_flag (sepops ops, rtx target, machine_mode mode)
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  rtx subtarget = target;
  location_t loc = ops->location;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
          || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
                  == FUNCTION_TYPE))))

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
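  /* Examples of the conversions below (illustration only): for signed
     operands, "x < 1" becomes "x <= 0" and "x > -1" becomes "x >= 0";
     comparisons against 1 are also converted for unsigned operands,
     e.g. "x >= 1" becomes "x > 0" (GEU -> GTU).  */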
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
        code = unsignedp ? LTU : LT;

      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
        code = unsignedp ? LEU : LE;

      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
        code = unsignedp ? GTU : GT;

      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
        code = unsignedp ? GEU : GE;

    case UNORDERED_EXPR:
      gcc_unreachable ();
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
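  /* Illustration (not part of the original source): "(x & 8) != 0" is
     folded to a direct test of bit 3, i.e. "(x >> 3) & 1", and
     "(x & 8) == 0" additionally XORs the result with 1,
     "((x >> 3) & 1) ^ 1".  Either form avoids an scc instruction.  */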
  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
          && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
          enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
          tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
          tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
                                       gimple_assign_rhs1 (srcstmt),
                                       gimple_assign_rhs2 (srcstmt));
          temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
            return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
                                operand_mode, unsignedp,
                                (TYPE_PRECISION (ops->type) == 1
                                 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label, rtx default_label, rtx fallback_label,
            int default_probability)
  struct expand_operand ops[5];
  machine_mode index_mode = SImode;
  rtx op1, op2, index;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
      machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                 omode, 1, default_label,
                                 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);

      if (TYPE_MODE (index_type) != index_mode)
          index_type = lang_hooks.types.type_for_mode (index_mode, 0);
          index_expr = fold_convert (index_type, index_expr);

      index = expand_normal (index_expr);

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
                                  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
              rtx default_label, int default_probability)
  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
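  /* Worked example (illustration only): for case values 3 .. 7 the
     caller has already computed INDEX = x - 3 and RANGE = 7 - 3 = 4.
     The single unsigned comparison (unsigned) (x - 3) > 4 below jumps
     to the default label both when x < 3 (the subtraction wraps to a
     large unsigned value) and when x > 7, replacing two signed
     comparisons.  */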
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
                               gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
  index = simplify_gen_binary (PLUS, Pmode, index,
                               gen_rtx_LABEL_REF (Pmode, table_label));
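  /* Address arithmetic illustration (not part of the original source):
     with a 4-byte CASE_VECTOR_MODE the two operations above compute
     table_label + index * 4, the address of the INDEX'th table entry,
     which is loaded and jumped through below.  */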
#ifdef PIC_CASE_VECTOR_ADDRESS
    index = PIC_CASE_VECTOR_ADDRESS (index);

  index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label, int default_probability)
  if (! HAVE_tablejump)

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
const_vector_from_tree (tree exp)
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
        RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);

  return gen_rtx_CONST_VECTOR (mode, v);
/* Build a decl for a personality function given a language prefix.  */
build_personality_function (const char *lang)
  const char *unwind_and_version;

  switch (targetm_common.except_unwind_info (&global_options))
      unwind_and_version = "_sj0";
      unwind_and_version = "_v0";
      unwind_and_version = "_seh0";
      gcc_unreachable ();

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
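/* Illustration (assumed example, not part of the original source): for
   LANG "gxx" this builds a declaration named "__gxx_personality_v0"
   with the usual unwinding scheme, "__gxx_personality_sj0" for
   setjmp/longjmp exceptions, or "__gxx_personality_seh0" for SEH,
   following the "__" + LANG + "_personality" + suffix scheme above.  */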
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */
get_personality_function (tree decl)
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)

      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);

#include "gt-expr.h"