/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "ssaexpand.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
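
/* Illustrative sketch (not part of GCC): on a target where the stack grows
   downward but the argument area does not, PUSH_ARGS_REVERSED is defined
   above and STACK_PUSH_CODE is PRE_DEC, so a single push of X comes out as
   RTL of roughly this shape (shown for exposition only):

     (set (mem:SI (pre_dec:P (reg:P sp))) (reg:SI x))

   i.e. the stack pointer is decremented and the value is stored at the new
   top of stack in one address side effect.  */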
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						      unsigned int,
						      unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
					HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */
void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

   /* Make sure both are fixed-point modes or both are not.  */
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
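
/* Illustrative sketch (not part of GCC): a caller that wants the value of a
   SImode pseudo zero-extended into a DImode pseudo could write something
   like the following; convert_move then picks a direct extend insn, a
   multi-word expansion, or a libcall as appropriate.  The variable names
   are invented for exposition.  */
#if 0
{
  rtx narrow = gen_reg_rtx (SImode);   /* hypothetical source pseudo */
  rtx wide = gen_reg_rtx (DImode);     /* hypothetical destination pseudo */

  convert_move (wide, narrow, 1);      /* 1 = treat NARROW as unsigned */
}
#endif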
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = uhwi_to_double_int (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
	val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to an
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
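
/* Illustrative sketch (not part of GCC): the masking above narrows a
   HOST_WIDE_INT constant to WIDTH bits and then sign-extends it again.
   For example, with width = 8 and val = 0x1FF, the zero-extension step
   yields 0xFF; since bit 7 is set and unsignedp is false, the final step
   ORs in ~0xFF, giving -1, which is exactly what an 8-bit signed
   truncation of 0x1FF means.  A standalone version, assuming the usual
   two's-complement HOST_WIDE_INT:  */
#if 0
static HOST_WIDE_INT
example_extend (HOST_WIDE_INT val, int width, int unsignedp)
{
  val &= ((HOST_WIDE_INT) 1 << width) - 1;                /* zero-extend */
  if (!unsignedp && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
    val |= (HOST_WIDE_INT) (-1) << width;                 /* sign-extend */
  return val;
}
#endif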
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
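
/* Illustrative sketch (not part of GCC): callers such as the string-builtin
   expanders consult this predicate before committing to an inline copy.
   A hypothetical caller with BLKmode MEMs DEST_MEM and SRC_MEM, a
   compile-time byte count LEN and a known alignment ALIGN (in bits) might
   write:  */
#if 0
  if (can_move_by_pieces (len, align))
    move_by_pieces (dest_mem, src_mem, len, align, 0);
  else
    emit_block_move (dest_mem, src_mem, GEN_INT (len), BLOCK_OP_NORMAL);
#endif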
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode, from_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode,
					     plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode,
					   plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
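
/* Illustrative sketch (not part of GCC): expanding a structure assignment
   whose operands are BLKmode MEMs typically ends up here.  A hypothetical
   caller holding two BLKmode MEM rtxes and a known 64-byte size could
   write:  */
#if 0
  emit_block_move (dst_mem, src_mem, GEN_INT (64), BLOCK_OP_NORMAL);
#endif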
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align,
			    HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align
						 / BITS_PER_UNIT),
					GEN_INT (expected_size));

	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
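
/* Illustrative sketch (not part of GCC): the same recipe builds a decl for
   any external C routine an expander might want to call.  A hypothetical
   helper for "memmove" would differ only in the identifier:  */
#if 0
  tree fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
			get_identifier ("memmove"),
			build_function_type_list (ptr_type_node,
						  ptr_type_node,
						  const_ptr_type_node,
						  sizetype, NULL_TREE));
  DECL_EXTERNAL (fn) = 1;
  TREE_PUBLIC (fn) = 1;
#endif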
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
  enum machine_mode y_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
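
/* Illustrative sketch (not part of GCC): the RTL emitted above corresponds
   to a simple byte-at-a-time copy loop like the following C, with ITER
   playing the role of i.  This is exposition only.  */
#if 0
  {
    unsigned long i;
    for (i = 0; i < size; i++)          /* cmp_label / top_label loop */
      ((char *) x)[i] = ((char *) y)[i];
  }
#endif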
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
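
/* Illustrative sketch (not part of GCC): a register group of this shape
   describes a value split across registers at given byte offsets.  For a
   hypothetical 16-byte aggregate returned half in an integer register and
   half in a floating register, the PARALLEL might print roughly as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DF 32) (const_int 8))])

   and gen_group_rtx clones that structure, replacing each hard register
   with a fresh pseudo of the same mode.  The register numbers above are
   invented for exposition.  */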
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode),
					    0);
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode),
					    0);
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = adj_bytelen;
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
2125 /* Generate code to copy a BLKmode object of TYPE out of a
2126 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2127 is null, a stack temporary is created. TGTBLK is returned.
2129 The purpose of this routine is to handle functions that return
2130 BLKmode structures in registers. Some machines (the PA for example)
2131 want to return all small structures in registers regardless of the
2132 structure's alignment. */
2135 copy_blkmode_from_reg (rtx tgtblk
, rtx srcreg
, tree type
)
2137 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2138 rtx src
= NULL
, dst
= NULL
;
2139 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2140 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
2141 enum machine_mode copy_mode
;
2145 tgtblk
= assign_temp (build_qualified_type (type
,
2147 | TYPE_QUAL_CONST
)),
2149 preserve_temp_slots (tgtblk
);
2152 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2153 into a new pseudo which is a full word. */
2155 if (GET_MODE (srcreg
) != BLKmode
2156 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2157 srcreg
= convert_to_mode (word_mode
, srcreg
, TYPE_UNSIGNED (type
));
2159 /* If the structure doesn't take up a whole number of words, see whether
2160 SRCREG is padded on the left or on the right. If it's on the left,
2161 set PADDING_CORRECTION to the number of bits to skip.
2163 In most ABIs, the structure will be returned at the least end of
2164 the register, which translates to right padding on little-endian
2165 targets and left padding on big-endian targets. The opposite
2166 holds if the structure is returned at the most significant
2167 end of the register. */
2168 if (bytes
% UNITS_PER_WORD
!= 0
2169 && (targetm
.calls
.return_in_msb (type
)
2171 : BYTES_BIG_ENDIAN
))
2173 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2175 /* Copy the structure BITSIZE bits at a time. If the target lives in
2176 memory, take care of not reading/writing past its end by selecting
2177 a copy mode suited to BITSIZE. This should always be possible given
2180 We could probably emit more efficient code for machines which do not use
2181 strict alignment, but it doesn't seem worth the effort at the current
2184 copy_mode
= word_mode
;
2187 enum machine_mode mem_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
2188 if (mem_mode
!= BLKmode
)
2189 copy_mode
= mem_mode
;
2192 for (bitpos
= 0, xbitpos
= padding_correction
;
2193 bitpos
< bytes
* BITS_PER_UNIT
;
2194 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2196 /* We need a new source operand each time xbitpos is on a
2197 word boundary and when xbitpos == padding_correction
2198 (the first time through). */
2199 if (xbitpos
% BITS_PER_WORD
== 0
2200 || xbitpos
== padding_correction
)
2201 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2204 /* We need a new destination operand each time bitpos is on
2206 if (bitpos
% BITS_PER_WORD
== 0)
2207 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2209 /* Use xbitpos for the source extraction (right justified) and
2210 bitpos for the destination store (left justified). */
2211 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, copy_mode
,
2212 extract_bit_field (src
, bitsize
,
2213 xbitpos
% BITS_PER_WORD
, 1,
2214 NULL_RTX
, copy_mode
, copy_mode
));
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
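/* Illustrative sketch only (not part of the compiler): callers accumulate
   these USEs into a local list and later attach it to the call insn;
   CALL_INSN below is a hypothetical placeholder for the emitted call.  */
#if 0
  rtx call_fusage = NULL_RTX;
  use_regs (&call_fusage, 0, 2);	/* Record hard registers 0 and 1.  */
  CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
#endif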
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}
2289 /* Determine whether the LEN bytes generated by CONSTFUN can be
2290 stored to memory using several move instructions. CONSTFUNDATA is
2291 a pointer which will be passed as argument in every CONSTFUN call.
2292 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2293 a memset operation and false if it's a copy of a constant string.
2294 Return nonzero if a call to store_by_pieces should succeed. */
2297 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
2298 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2299 void *constfundata
, unsigned int align
, bool memsetp
)
2301 unsigned HOST_WIDE_INT l
;
2302 unsigned int max_size
;
2303 HOST_WIDE_INT offset
= 0;
2304 enum machine_mode mode
, tmode
;
2305 enum insn_code icode
;
2313 ? SET_BY_PIECES_P (len
, align
)
2314 : STORE_BY_PIECES_P (len
, align
)))
2317 tmode
= mode_for_size (STORE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
2318 if (align
>= GET_MODE_ALIGNMENT (tmode
))
2319 align
= GET_MODE_ALIGNMENT (tmode
);
2322 enum machine_mode xmode
;
2324 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
2326 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
2327 if (GET_MODE_SIZE (tmode
) > STORE_MAX_PIECES
2328 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
2331 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
2334 /* We would first store what we can in the largest integer mode, then go to
2335 successively smaller modes. */
2338 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2343 max_size
= STORE_MAX_PIECES
+ 1;
2344 while (max_size
> 1)
2346 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2347 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2348 if (GET_MODE_SIZE (tmode
) < max_size
)
2351 if (mode
== VOIDmode
)
2354 icode
= optab_handler (mov_optab
, mode
)->insn_code
;
2355 if (icode
!= CODE_FOR_nothing
2356 && align
>= GET_MODE_ALIGNMENT (mode
))
2358 unsigned int size
= GET_MODE_SIZE (mode
);
2365 cst
= (*constfun
) (constfundata
, offset
, mode
);
2366 if (!LEGITIMATE_CONSTANT_P (cst
))
2376 max_size
= GET_MODE_SIZE (mode
);
2379 /* The code above should have handled everything. */
2386 /* Generate several move instructions to store LEN bytes generated by
2387 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2388 pointer which will be passed as argument in every CONSTFUN call.
2389 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2390 a memset operation and false if it's a copy of a constant string.
2391 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2392 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2396 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
2397 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2398 void *constfundata
, unsigned int align
, bool memsetp
, int endp
)
2400 enum machine_mode to_addr_mode
2401 = targetm
.addr_space
.address_mode (MEM_ADDR_SPACE (to
));
2402 struct store_by_pieces_d data
;
2406 gcc_assert (endp
!= 2);
2411 ? SET_BY_PIECES_P (len
, align
)
2412 : STORE_BY_PIECES_P (len
, align
));
2413 data
.constfun
= constfun
;
2414 data
.constfundata
= constfundata
;
2417 store_by_pieces_1 (&data
, align
);
2422 gcc_assert (!data
.reverse
);
2427 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2428 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2430 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
2431 plus_constant (data
.to_addr
,
2434 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2441 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
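/* Illustrative sketch only (not part of the compiler): a hypothetical
   constfun callback for store_by_pieces that replicates a single byte,
   playing the role clear_by_pieces_1 plays for zeros.  It assumes MODE is
   an integer mode no wider than a HOST_WIDE_INT.  */
#if 0
static rtx
repeat_byte_by_pieces_1 (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 enum machine_mode mode)
{
  /* DATA points at the byte to replicate; widen it to fill MODE.  */
  unsigned char c = *(unsigned char *) data;
  unsigned HOST_WIDE_INT v = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    v = (v << BITS_PER_UNIT) | c;
  return gen_int_mode (v, mode);
}
#endif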
2478 /* Subroutine of clear_by_pieces and store_by_pieces.
2479 Generate several move instructions to store LEN bytes of block TO. (A MEM
2480 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2483 store_by_pieces_1 (struct store_by_pieces_d
*data ATTRIBUTE_UNUSED
,
2484 unsigned int align ATTRIBUTE_UNUSED
)
2486 enum machine_mode to_addr_mode
2487 = targetm
.addr_space
.address_mode (MEM_ADDR_SPACE (data
->to
));
2488 rtx to_addr
= XEXP (data
->to
, 0);
2489 unsigned int max_size
= STORE_MAX_PIECES
+ 1;
2490 enum machine_mode mode
= VOIDmode
, tmode
;
2491 enum insn_code icode
;
2494 data
->to_addr
= to_addr
;
2496 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2497 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2499 data
->explicit_inc_to
= 0;
2501 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2503 data
->offset
= data
->len
;
2505 /* If storing requires more than two move insns,
2506 copy addresses to registers (to make displacements shorter)
2507 and use post-increment if available. */
2508 if (!data
->autinc_to
2509 && move_by_pieces_ninsns (data
->len
, align
, max_size
) > 2)
2511 /* Determine the main mode we'll be using. */
2512 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2513 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2514 if (GET_MODE_SIZE (tmode
) < max_size
)
2517 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2519 data
->to_addr
= copy_to_mode_reg (to_addr_mode
,
2520 plus_constant (to_addr
, data
->len
));
2521 data
->autinc_to
= 1;
2522 data
->explicit_inc_to
= -1;
2525 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2526 && ! data
->autinc_to
)
2528 data
->to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
2529 data
->autinc_to
= 1;
2530 data
->explicit_inc_to
= 1;
2533 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2534 data
->to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
2537 tmode
= mode_for_size (STORE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
2538 if (align
>= GET_MODE_ALIGNMENT (tmode
))
2539 align
= GET_MODE_ALIGNMENT (tmode
);
2542 enum machine_mode xmode
;
2544 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
2546 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
2547 if (GET_MODE_SIZE (tmode
) > STORE_MAX_PIECES
2548 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
2551 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
2554 /* First store what we can in the largest integer mode, then go to
2555 successively smaller modes. */
2557 while (max_size
> 1)
2559 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2560 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2561 if (GET_MODE_SIZE (tmode
) < max_size
)
2564 if (mode
== VOIDmode
)
2567 icode
= optab_handler (mov_optab
, mode
)->insn_code
;
2568 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2569 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2571 max_size
= GET_MODE_SIZE (mode
);
2574 /* The code above should have handled everything. */
2575 gcc_assert (!data
->len
);
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
		     unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
				   expected_align, expected_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
				    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  return clear_storage_hints (object, size, method, 0, -1);
}
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those pseudos into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
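/* Illustrative sketch only (not part of the compiler): when the user
   redirects memset with an asm rename, the new assembler name is recorded
   through init_block_clear_fn; "__my_memset" is a hypothetical example.  */
#if 0
  init_block_clear_fn ("__my_memset");
#endif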
2772 /* Expand a setmem pattern; return true if successful. */
2775 set_storage_via_setmem (rtx object
, rtx size
, rtx val
, unsigned int align
,
2776 unsigned int expected_align
, HOST_WIDE_INT expected_size
)
2778 /* Try the most limited insn first, because there's no point
2779 including more than one in the machine description unless
2780 the more limited one has some advantage. */
2782 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2783 enum machine_mode mode
;
2785 if (expected_align
< align
)
2786 expected_align
= align
;
2788 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2789 mode
= GET_MODE_WIDER_MODE (mode
))
2791 enum insn_code code
= setmem_optab
[(int) mode
];
2792 insn_operand_predicate_fn pred
;
2794 if (code
!= CODE_FOR_nothing
2795 /* We don't need MODE to be narrower than
2796 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2797 the mode mask, as it is returned by the macro, it will
2798 definitely be less than the actual mode mask. */
2799 && ((CONST_INT_P (size
)
2800 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2801 <= (GET_MODE_MASK (mode
) >> 1)))
2802 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2803 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2804 || (*pred
) (object
, BLKmode
))
2805 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
2806 || (*pred
) (opalign
, VOIDmode
)))
2809 enum machine_mode char_mode
;
2810 rtx last
= get_last_insn ();
2813 opsize
= convert_to_mode (mode
, size
, 1);
2814 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2815 if (pred
!= 0 && ! (*pred
) (opsize
, mode
))
2816 opsize
= copy_to_mode_reg (mode
, opsize
);
2819 char_mode
= insn_data
[(int) code
].operand
[2].mode
;
2820 if (char_mode
!= VOIDmode
)
2822 opchar
= convert_to_mode (char_mode
, opchar
, 1);
2823 pred
= insn_data
[(int) code
].operand
[2].predicate
;
2824 if (pred
!= 0 && ! (*pred
) (opchar
, char_mode
))
2825 opchar
= copy_to_mode_reg (char_mode
, opchar
);
2828 if (insn_data
[(int) code
].n_operands
== 4)
2829 pat
= GEN_FCN ((int) code
) (object
, opsize
, opchar
, opalign
);
2831 pat
= GEN_FCN ((int) code
) (object
, opsize
, opchar
, opalign
,
2832 GEN_INT (expected_align
2834 GEN_INT (expected_size
));
2841 delete_insns_since (last
);
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
	{
	  emit_move_insn (part, val);
	  return;
	}
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
	return ret;
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, NULL_RTX, imode, imode);
}
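/* Illustrative sketch only (not part of the compiler): reading and writing
   the two halves of a complex value; VAL is a hypothetical DCmode rtx.  */
#if 0
  rtx im = read_complex_part (val, true);		/* Imaginary half.  */
  write_complex_part (val, CONST0_RTX (DFmode), false);	/* Zero the real half.  */
#endif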
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
		       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode)->insn_code;
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

static rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
	val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
			      GEN_INT (adjust), stack_pointer_rtx,
			      0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;

    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;

    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
		  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			 read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	   && (!STRICT_ALIGNMENT
	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
	 existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
	{
	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
			   BLOCK_OP_NO_LIBCALL);
	  return get_last_insn ();
	}

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
	return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;

  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
			- GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }
  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;

  return false;
}
3284 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3285 MODE is any multi-word or full-word mode that lacks a move_insn
3286 pattern. Note that you will get better code if you define such
3287 patterns, even if they must turn into multiple assembler instructions. */
3290 emit_move_multi_word (enum machine_mode mode
, rtx x
, rtx y
)
3297 gcc_assert (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
);
3299 /* If X is a push on the stack, do the push now and replace
3300 X with a reference to the stack pointer. */
3301 if (push_operand (x
, mode
))
3302 x
= emit_move_resolve_push (mode
, x
);
3304 /* If we are in reload, see if either operand is a MEM whose address
3305 is scheduled for replacement. */
3306 if (reload_in_progress
&& MEM_P (x
)
3307 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3308 x
= replace_equiv_address_nv (x
, inner
);
3309 if (reload_in_progress
&& MEM_P (y
)
3310 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3311 y
= replace_equiv_address_nv (y
, inner
);
3315 need_clobber
= false;
3317 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3320 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3323 /* Do not generate code for a move if it would come entirely
3324 from the undefined bits of a paradoxical subreg. */
3325 if (undefined_operand_subword_p (y
, i
))
3328 ypart
= operand_subword (y
, i
, 1, mode
);
3330 /* If we can't get a part of Y, put Y into memory if it is a
3331 constant. Otherwise, force it into a register. Then we must
3332 be able to get a part of Y. */
3333 if (ypart
== 0 && CONSTANT_P (y
))
3335 y
= use_anchored_address (force_const_mem (mode
, y
));
3336 ypart
= operand_subword (y
, i
, 1, mode
);
3338 else if (ypart
== 0)
3339 ypart
= operand_subword_force (y
, i
, mode
);
3341 gcc_assert (xpart
&& ypart
);
3343 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3345 last_insn
= emit_move_insn (xpart
, ypart
);
3351 /* Show the output dies here. This is necessary for SUBREGs
3352 of pseudos since we cannot track their lifetimes correctly;
3353 hard regs shouldn't appear here except as return values.
3354 We never want to emit such a clobber after reload. */
3356 && ! (reload_in_progress
|| reload_completed
)
3357 && need_clobber
!= 0)
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode)->insn_code;
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
	return result;
      else
	return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, false);
      if (ret)
	return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	  else
	    y = use_anchored_address (y);
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					 MEM_ADDR_SPACE (x))
	  && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
					MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
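/* Illustrative sketch only (not part of the compiler): the typical way a
   caller copies a constant into a fresh pseudo; the mode and value are
   hypothetical.  */
#if 0
  rtx tmp = gen_reg_rtx (SImode);
  emit_move_insn (tmp, GEN_INT (42));	/* May record a REG_EQUAL note.  */
#endif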
3480 /* If Y is representable exactly in a narrower mode, and the target can
3481 perform the extension directly from constant or memory, then emit the
3482 move as an extension. */
3485 compress_float_constant (rtx x
, rtx y
)
3487 enum machine_mode dstmode
= GET_MODE (x
);
3488 enum machine_mode orig_srcmode
= GET_MODE (y
);
3489 enum machine_mode srcmode
;
3491 int oldcost
, newcost
;
3492 bool speed
= optimize_insn_for_speed_p ();
3494 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3496 if (LEGITIMATE_CONSTANT_P (y
))
3497 oldcost
= rtx_cost (y
, SET
, speed
);
3499 oldcost
= rtx_cost (force_const_mem (dstmode
, y
), SET
, speed
);
3501 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3502 srcmode
!= orig_srcmode
;
3503 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3506 rtx trunc_y
, last_insn
;
3508 /* Skip if the target can't extend this way. */
3509 ic
= can_extend_p (dstmode
, srcmode
, 0);
3510 if (ic
== CODE_FOR_nothing
)
3513 /* Skip if the narrowed value isn't exact. */
3514 if (! exact_real_truncate (srcmode
, &r
))
3517 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3519 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3521 /* Skip if the target needs extra instructions to perform
3523 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
3525 /* This is valid, but may not be cheaper than the original. */
3526 newcost
= rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
), SET
, speed
);
3527 if (oldcost
< newcost
)
3530 else if (float_extend_from_mem
[dstmode
][srcmode
])
3532 trunc_y
= force_const_mem (srcmode
, trunc_y
);
3533 /* This is valid, but may not be cheaper than the original. */
3534 newcost
= rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
), SET
, speed
);
3535 if (oldcost
< newcost
)
3537 trunc_y
= validize_mem (trunc_y
);
3542 /* For CSE's benefit, force the compressed constant pool entry
3543 into a new pseudo. This constant may be used in different modes,
3544 and if not, combine will put things back together for us. */
3545 trunc_y
= force_reg (srcmode
, trunc_y
);
3546 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3547 last_insn
= get_last_insn ();
3550 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3558 /* Pushing data onto the stack. */
3560 /* Push a block of length SIZE (perhaps variable)
3561 and return an rtx to address the beginning of the block.
3562 The value may be virtual_outgoing_args_rtx.
3564 EXTRA is the number of bytes of padding to push in addition to SIZE.
3565 BELOW nonzero means this padding comes at low addresses;
3566 otherwise, the padding comes at high addresses. */
3569 push_block (rtx size
, int extra
, int below
)
3573 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3574 if (CONSTANT_P (size
))
3575 anti_adjust_stack (plus_constant (size
, extra
));
3576 else if (REG_P (size
) && extra
== 0)
3577 anti_adjust_stack (size
);
3580 temp
= copy_to_mode_reg (Pmode
, size
);
3582 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3583 temp
, 0, OPTAB_LIB_WIDEN
);
3584 anti_adjust_stack (temp
);
3587 #ifndef STACK_GROWS_DOWNWARD
3593 temp
= virtual_outgoing_args_rtx
;
3594 if (extra
!= 0 && below
)
3595 temp
= plus_constant (temp
, extra
);
3599 if (CONST_INT_P (size
))
3600 temp
= plus_constant (virtual_outgoing_args_rtx
,
3601 -INTVAL (size
) - (below
? 0 : extra
));
3602 else if (extra
!= 0 && !below
)
3603 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3604 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3606 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3607 negate_rtx (Pmode
, size
));
3610 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
3613 #ifdef PUSH_ROUNDING
3615 /* Emit single push insn. */
3618 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
3621 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3623 enum insn_code icode
;
3624 insn_operand_predicate_fn pred
;
3626 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3627 /* If there is push pattern, use it. Otherwise try old way of throwing
3628 MEM representing push operation to move expander. */
3629 icode
= optab_handler (push_optab
, mode
)->insn_code
;
3630 if (icode
!= CODE_FOR_nothing
)
3632 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3633 && !((*pred
) (x
, mode
))))
3634 x
= force_reg (mode
, x
);
3635 emit_insn (GEN_FCN (icode
) (x
));
3638 if (GET_MODE_SIZE (mode
) == rounded_size
)
3639 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3640 /* If we are to pad downward, adjust the stack pointer first and
3641 then store X into the stack location using an offset. This is
3642 because emit_move_insn does not know how to pad; it does not have
3644 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
3646 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
3647 HOST_WIDE_INT offset
;
3649 emit_move_insn (stack_pointer_rtx
,
3650 expand_binop (Pmode
,
3651 #ifdef STACK_GROWS_DOWNWARD
3657 GEN_INT (rounded_size
),
3658 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
3660 offset
= (HOST_WIDE_INT
) padding_size
;
3661 #ifdef STACK_GROWS_DOWNWARD
3662 if (STACK_PUSH_CODE
== POST_DEC
)
3663 /* We have already decremented the stack pointer, so get the
3665 offset
+= (HOST_WIDE_INT
) rounded_size
;
3667 if (STACK_PUSH_CODE
== POST_INC
)
3668 /* We have already incremented the stack pointer, so get the
3670 offset
-= (HOST_WIDE_INT
) rounded_size
;
3672 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (offset
));
3676 #ifdef STACK_GROWS_DOWNWARD
3677 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3678 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3679 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3681 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3682 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3683 GEN_INT (rounded_size
));
3685 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3688 dest
= gen_rtx_MEM (mode
, dest_addr
);
3692 set_mem_attributes (dest
, type
, 1);
3694 if (flag_optimize_sibling_calls
)
3695 /* Function incoming arguments may overlap with sibling call
3696 outgoing arguments and we cannot allow reordering of reads
3697 from function arguments with stores to outgoing arguments
3698 of sibling calls. */
3699 set_mem_alias_set (dest
, 0);
3701 emit_move_insn (dest
, x
);
3705 /* Generate code to push X onto the stack, assuming it has mode MODE and
3707 MODE is redundant except when X is a CONST_INT (since they don't
3709 SIZE is an rtx for the size of data to be copied (in bytes),
3710 needed only if X is BLKmode.
3712 ALIGN (in bits) is maximum alignment we can assume.
3714 If PARTIAL and REG are both nonzero, then copy that many of the first
3715 bytes of X into registers starting with REG, and push the rest of X.
3716 The amount of space pushed is decreased by PARTIAL bytes.
3717 REG must be a hard register in this case.
3718 If REG is zero but PARTIAL is not, take any all others actions for an
3719 argument partially in registers, but do not actually load any
3722 EXTRA is the amount in bytes of extra space to leave next to this arg.
3723 This is ignored if an argument block has already been allocated.
3725 On a machine that lacks real push insns, ARGS_ADDR is the address of
3726 the bottom of the argument block for this call. We use indexing off there
3727 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3728 argument block has not been preallocated.
3730 ARGS_SO_FAR is the size of args previously pushed for this call.
3732 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3733 for arguments passed in registers. If nonzero, it will be the number
3734 of bytes required. */
3737 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3738 unsigned int align
, int partial
, rtx reg
, int extra
,
3739 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3743 enum direction stack_direction
3744 #ifdef STACK_GROWS_DOWNWARD
3750 /* Decide where to pad the argument: `downward' for below,
3751 `upward' for above, or `none' for don't pad it.
3752 Default is below for small data on big-endian machines; else above. */
3753 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3755 /* Invert direction if stack is post-decrement.
3757 if (STACK_PUSH_CODE
== POST_DEC
)
3758 if (where_pad
!= none
)
3759 where_pad
= (where_pad
== downward
? upward
: downward
);
3764 || (STRICT_ALIGNMENT
&& align
< GET_MODE_ALIGNMENT (mode
)))
3766 /* Copy a block into the stack, entirely or partially. */
3773 offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3774 used
= partial
- offset
;
3776 if (mode
!= BLKmode
)
3778 /* A value is to be stored in an insufficiently aligned
3779 stack slot; copy via a suitably aligned slot if
3781 size
= GEN_INT (GET_MODE_SIZE (mode
));
3782 if (!MEM_P (xinner
))
3784 temp
= assign_temp (type
, 0, 1, 1);
3785 emit_move_insn (temp
, xinner
);
3792 /* USED is now the # of bytes we need not copy to the stack
3793 because registers will take care of them. */
3796 xinner
= adjust_address (xinner
, BLKmode
, used
);
3798 /* If the partial register-part of the arg counts in its stack size,
3799 skip the part of stack space corresponding to the registers.
3800 Otherwise, start copying to the beginning of the stack space,
3801 by setting SKIP to 0. */
3802 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3804 #ifdef PUSH_ROUNDING
3805 /* Do it with several push insns if that doesn't take lots of insns
3806 and if there is no difficulty with push insns that skip bytes
3807 on the stack for alignment purposes. */
3810 && CONST_INT_P (size
)
3812 && MEM_ALIGN (xinner
) >= align
3813 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3814 /* Here we avoid the case of a structure whose weak alignment
3815 forces many pushes of a small amount of data,
3816 and such small pushes do rounding that causes trouble. */
3817 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3818 || align
>= BIGGEST_ALIGNMENT
3819 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3820 == (align
/ BITS_PER_UNIT
)))
3821 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3823 /* Push padding now if padding above and stack grows down,
3824 or if padding below and stack grows up.
3825 But if space already allocated, this has already been done. */
3826 if (extra
&& args_addr
== 0
3827 && where_pad
!= none
&& where_pad
!= stack_direction
)
3828 anti_adjust_stack (GEN_INT (extra
));
3830 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3833 #endif /* PUSH_ROUNDING */
3837 /* Otherwise make space on the stack and copy the data
3838 to the address of that space. */
3840 /* Deduct words put into registers from the size we must copy. */
3843 if (CONST_INT_P (size
))
3844 size
= GEN_INT (INTVAL (size
) - used
);
3846 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3847 GEN_INT (used
), NULL_RTX
, 0,
3851 /* Get the address of the stack space.
3852 In this case, we do not deal with EXTRA separately.
3853 A single stack adjust will do. */
3856 temp
= push_block (size
, extra
, where_pad
== downward
);
3859 else if (CONST_INT_P (args_so_far
))
3860 temp
= memory_address (BLKmode
,
3861 plus_constant (args_addr
,
3862 skip
+ INTVAL (args_so_far
)));
3864 temp
= memory_address (BLKmode
,
3865 plus_constant (gen_rtx_PLUS (Pmode
,
3870 if (!ACCUMULATE_OUTGOING_ARGS
)
3872 /* If the source is referenced relative to the stack pointer,
3873 copy it to another register to stabilize it. We do not need
3874 to do this if we know that we won't be changing sp. */
3876 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3877 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3878 temp
= copy_to_reg (temp
);
3881 target
= gen_rtx_MEM (BLKmode
, temp
);
3883 /* We do *not* set_mem_attributes here, because incoming arguments
3884 may overlap with sibling call outgoing arguments and we cannot
3885 allow reordering of reads from function arguments with stores
3886 to outgoing arguments of sibling calls. We do, however, want
3887 to record the alignment of the stack slot. */
3888 /* ALIGN may well be better aligned than TYPE, e.g. due to
3889 PARM_BOUNDARY. Assume the caller isn't lying. */
3890 set_mem_align (target
, align
);
3892 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3895 else if (partial
> 0)
3897 /* Scalar partly in registers. */
3899 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3902 /* # bytes of start of argument
3903 that we must make space for but need not store. */
3904 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3905 int args_offset
= INTVAL (args_so_far
);
3908 /* Push padding now if padding above and stack grows down,
3909 or if padding below and stack grows up.
3910 But if space already allocated, this has already been done. */
3911 if (extra
&& args_addr
== 0
3912 && where_pad
!= none
&& where_pad
!= stack_direction
)
3913 anti_adjust_stack (GEN_INT (extra
));
3915 /* If we make space by pushing it, we might as well push
3916 the real data. Otherwise, we can leave OFFSET nonzero
3917 and leave the space uninitialized. */
3921 /* Now NOT_STACK gets the number of words that we don't need to
3922 allocate on the stack. Convert OFFSET to words too. */
3923 not_stack
= (partial
- offset
) / UNITS_PER_WORD
;
3924 offset
/= UNITS_PER_WORD
;
3926 /* If the partial register-part of the arg counts in its stack size,
3927 skip the part of stack space corresponding to the registers.
3928 Otherwise, start copying to the beginning of the stack space,
3929 by setting SKIP to 0. */
3930 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3932 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3933 x
= validize_mem (force_const_mem (mode
, x
));
3935 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3936 SUBREGs of such registers are not allowed. */
3937 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
3938 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3939 x
= copy_to_reg (x
);
3941 /* Loop over all the words allocated on the stack for this arg. */
3942 /* We can do it by words, because any scalar bigger than a word
3943 has a size a multiple of a word. */
3944 #ifndef PUSH_ARGS_REVERSED
3945 for (i
= not_stack
; i
< size
; i
++)
3947 for (i
= size
- 1; i
>= not_stack
; i
--)
3949 if (i
>= not_stack
+ offset
)
3950 emit_push_insn (operand_subword_force (x
, i
, mode
),
3951 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3953 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3955 reg_parm_stack_space
, alignment_pad
);
3962 /* Push padding now if padding above and stack grows down,
3963 or if padding below and stack grows up.
3964 But if space already allocated, this has already been done. */
3965 if (extra
&& args_addr
== 0
3966 && where_pad
!= none
&& where_pad
!= stack_direction
)
3967 anti_adjust_stack (GEN_INT (extra
));
3969 #ifdef PUSH_ROUNDING
3970 if (args_addr
== 0 && PUSH_ARGS
)
3971 emit_single_push_insn (mode
, x
, type
);
3975 if (CONST_INT_P (args_so_far
))
3977 = memory_address (mode
,
3978 plus_constant (args_addr
,
3979 INTVAL (args_so_far
)));
3981 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3983 dest
= gen_rtx_MEM (mode
, addr
);
3985 /* We do *not* set_mem_attributes here, because incoming arguments
3986 may overlap with sibling call outgoing arguments and we cannot
3987 allow reordering of reads from function arguments with stores
3988 to outgoing arguments of sibling calls. We do, however, want
3989 to record the alignment of the stack slot. */
3990 /* ALIGN may well be better aligned than TYPE, e.g. due to
3991 PARM_BOUNDARY. Assume the caller isn't lying. */
3992 set_mem_align (dest
, align
);
3994 emit_move_insn (dest
, x
);
3998 /* If part should go in registers, copy that part
3999 into the appropriate registers. Do this now, at the end,
4000 since mem-to-mem copies above may do function calls. */
4001 if (partial
> 0 && reg
!= 0)
4003 /* Handle calls that pass values in multiple non-contiguous locations.
4004 The Irix 6 ABI has examples of this. */
4005 if (GET_CODE (reg
) == PARALLEL
)
4006 emit_group_load (reg
, x
, type
, -1);
4009 gcc_assert (partial
% UNITS_PER_WORD
== 0);
4010 move_block_to_reg (REGNO (reg
), x
, partial
/ UNITS_PER_WORD
, mode
);
4014 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
4015 anti_adjust_stack (GEN_INT (extra
));
4017 if (alignment_pad
&& args_addr
== 0)
4018 anti_adjust_stack (alignment_pad
);
4021 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4025 get_subtarget (rtx x
)
4029 /* Only registers can be subtargets. */
4031 /* Don't use hard regs to avoid extending their life. */
4032 || REGNO (x
) < FIRST_PSEUDO_REGISTER
4036 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4037 FIELD is a bitfield. Returns true if the optimization was successful,
4038 and there's nothing else to do. */
4041 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize
,
4042 unsigned HOST_WIDE_INT bitpos
,
4043 enum machine_mode mode1
, rtx str_rtx
,
4046 enum machine_mode str_mode
= GET_MODE (str_rtx
);
4047 unsigned int str_bitsize
= GET_MODE_BITSIZE (str_mode
);
4052 if (mode1
!= VOIDmode
4053 || bitsize
>= BITS_PER_WORD
4054 || str_bitsize
> BITS_PER_WORD
4055 || TREE_SIDE_EFFECTS (to
)
4056 || TREE_THIS_VOLATILE (to
))
4060 if (!BINARY_CLASS_P (src
)
4061 || TREE_CODE (TREE_TYPE (src
)) != INTEGER_TYPE
)
4064 op0
= TREE_OPERAND (src
, 0);
4065 op1
= TREE_OPERAND (src
, 1);
4068 if (!operand_equal_p (to
, op0
, 0))
4071 if (MEM_P (str_rtx
))
4073 unsigned HOST_WIDE_INT offset1
;
4075 if (str_bitsize
== 0 || str_bitsize
> BITS_PER_WORD
)
4076 str_mode
= word_mode
;
4077 str_mode
= get_best_mode (bitsize
, bitpos
,
4078 MEM_ALIGN (str_rtx
), str_mode
, 0);
4079 if (str_mode
== VOIDmode
)
4081 str_bitsize
= GET_MODE_BITSIZE (str_mode
);
4084 bitpos
%= str_bitsize
;
4085 offset1
= (offset1
- bitpos
) / BITS_PER_UNIT
;
4086 str_rtx
= adjust_address (str_rtx
, str_mode
, offset1
);
4088 else if (!REG_P (str_rtx
) && GET_CODE (str_rtx
) != SUBREG
)
4091 /* If the bit field covers the whole REG/MEM, store_field
4092 will likely generate better code. */
4093 if (bitsize
>= str_bitsize
)
4096 /* We can't handle fields split across multiple entities. */
4097 if (bitpos
+ bitsize
> str_bitsize
)
4100 if (BYTES_BIG_ENDIAN
)
4101 bitpos
= str_bitsize
- bitpos
- bitsize
;
4103 switch (TREE_CODE (src
))
4107 /* For now, just optimize the case of the topmost bitfield
4108 where we don't need to do any masking and also
4109 1 bit bitfields where xor can be used.
4110 We might win by one instruction for the other bitfields
4111 too if insv/extv instructions aren't used, so that
4112 can be added later. */
4113 if (bitpos
+ bitsize
!= str_bitsize
4114 && (bitsize
!= 1 || TREE_CODE (op1
) != INTEGER_CST
))
4117 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4118 value
= convert_modes (str_mode
,
4119 TYPE_MODE (TREE_TYPE (op1
)), value
,
4120 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4122 /* We may be accessing data outside the field, which means
4123 we can alias adjacent data. */
4124 if (MEM_P (str_rtx
))
4126 str_rtx
= shallow_copy_rtx (str_rtx
);
4127 set_mem_alias_set (str_rtx
, 0);
4128 set_mem_expr (str_rtx
, 0);
4131 binop
= TREE_CODE (src
) == PLUS_EXPR
? add_optab
: sub_optab
;
4132 if (bitsize
== 1 && bitpos
+ bitsize
!= str_bitsize
)
4134 value
= expand_and (str_mode
, value
, const1_rtx
, NULL
);
4137 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
,
4138 build_int_cst (NULL_TREE
, bitpos
),
4140 result
= expand_binop (str_mode
, binop
, str_rtx
,
4141 value
, str_rtx
, 1, OPTAB_WIDEN
);
4142 if (result
!= str_rtx
)
4143 emit_move_insn (str_rtx
, result
);
4148 if (TREE_CODE (op1
) != INTEGER_CST
)
4150 value
= expand_expr (op1
, NULL_RTX
, GET_MODE (str_rtx
), EXPAND_NORMAL
);
4151 value
= convert_modes (GET_MODE (str_rtx
),
4152 TYPE_MODE (TREE_TYPE (op1
)), value
,
4153 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4155 /* We may be accessing data outside the field, which means
4156 we can alias adjacent data. */
4157 if (MEM_P (str_rtx
))
4159 str_rtx
= shallow_copy_rtx (str_rtx
);
4160 set_mem_alias_set (str_rtx
, 0);
4161 set_mem_expr (str_rtx
, 0);
4164 binop
= TREE_CODE (src
) == BIT_IOR_EXPR
? ior_optab
: xor_optab
;
4165 if (bitpos
+ bitsize
!= GET_MODE_BITSIZE (GET_MODE (str_rtx
)))
4167 rtx mask
= GEN_INT (((unsigned HOST_WIDE_INT
) 1 << bitsize
)
4169 value
= expand_and (GET_MODE (str_rtx
), value
, mask
,
4172 value
= expand_shift (LSHIFT_EXPR
, GET_MODE (str_rtx
), value
,
4173 build_int_cst (NULL_TREE
, bitpos
),
4175 result
= expand_binop (GET_MODE (str_rtx
), binop
, str_rtx
,
4176 value
, str_rtx
, 1, OPTAB_WIDEN
);
4177 if (result
!= str_rtx
)
4178 emit_move_insn (str_rtx
, result
);
4189 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4190 is true, try generating a nontemporal store. */
4193 expand_assignment (tree to
, tree from
, bool nontemporal
)
4198 /* Don't crash if the lhs of the assignment was erroneous. */
4199 if (TREE_CODE (to
) == ERROR_MARK
)
4201 result
= expand_normal (from
);
4205 /* Optimize away no-op moves without side-effects. */
4206 if (operand_equal_p (to
, from
, 0))
4209 /* Assignment of a structure component needs special treatment
4210 if the structure component's rtx is not simply a MEM.
4211 Assignment of an array element at a constant index, and assignment of
4212 an array element in an unaligned packed structure field, has the same
4214 if (handled_component_p (to
)
4215 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
4217 enum machine_mode mode1
;
4218 HOST_WIDE_INT bitsize
, bitpos
;
4225 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
4226 &unsignedp
, &volatilep
, true);
4228 /* If we are going to use store_bit_field and extract_bit_field,
4229 make sure to_rtx will be safe for multiple use. */
4231 to_rtx
= expand_normal (tem
);
4235 enum machine_mode address_mode
;
4238 if (!MEM_P (to_rtx
))
4240 /* We can get constant negative offsets into arrays with broken
4241 user code. Translate this to a trap instead of ICEing. */
4242 gcc_assert (TREE_CODE (offset
) == INTEGER_CST
);
4243 expand_builtin_trap ();
4244 to_rtx
= gen_rtx_MEM (BLKmode
, const0_rtx
);
4247 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4249 = targetm
.addr_space
.address_mode (MEM_ADDR_SPACE (to_rtx
));
4250 if (GET_MODE (offset_rtx
) != address_mode
)
4251 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
4253 /* A constant address in TO_RTX can have VOIDmode, we must not try
4254 to call force_reg for that case. Avoid that case. */
4256 && GET_MODE (to_rtx
) == BLKmode
4257 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
4259 && (bitpos
% bitsize
) == 0
4260 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4261 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
4263 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4267 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4268 highest_pow2_factor_for_target (to
,
4272 /* Handle expand_expr of a complex value returning a CONCAT. */
4273 if (GET_CODE (to_rtx
) == CONCAT
)
4275 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from
))))
4277 gcc_assert (bitpos
== 0);
4278 result
= store_expr (from
, to_rtx
, false, nontemporal
);
4282 gcc_assert (bitpos
== 0 || bitpos
== GET_MODE_BITSIZE (mode1
));
4283 result
= store_expr (from
, XEXP (to_rtx
, bitpos
!= 0), false,
4291 /* If the field is at offset zero, we could have been given the
4292 DECL_RTX of the parent struct. Don't munge it. */
4293 to_rtx
= shallow_copy_rtx (to_rtx
);
4295 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
4297 /* Deal with volatile and readonly fields. The former is only
4298 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4300 MEM_VOLATILE_P (to_rtx
) = 1;
4301 if (component_uses_parent_alias_set (to
))
4302 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4305 if (optimize_bitfield_assignment_op (bitsize
, bitpos
, mode1
,
4309 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
4310 TREE_TYPE (tem
), get_alias_set (to
),
4315 preserve_temp_slots (result
);
4321 else if (TREE_CODE (to
) == MISALIGNED_INDIRECT_REF
)
4323 addr_space_t as
= ADDR_SPACE_GENERIC
;
4324 enum machine_mode mode
, op_mode1
;
4325 enum insn_code icode
;
4326 rtx reg
, addr
, mem
, insn
;
4328 if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to
, 0))))
4329 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to
, 0))));
4331 reg
= expand_expr (from
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4332 reg
= force_not_mem (reg
);
4334 mode
= TYPE_MODE (TREE_TYPE (to
));
4335 addr
= expand_expr (TREE_OPERAND (to
, 0), NULL_RTX
, VOIDmode
,
4337 addr
= memory_address_addr_space (mode
, addr
, as
);
4338 mem
= gen_rtx_MEM (mode
, addr
);
4340 set_mem_attributes (mem
, to
, 0);
4341 set_mem_addr_space (mem
, as
);
4343 icode
= movmisalign_optab
->handlers
[mode
].insn_code
;
4344 gcc_assert (icode
!= CODE_FOR_nothing
);
4346 op_mode1
= insn_data
[icode
].operand
[1].mode
;
4347 if (! (*insn_data
[icode
].operand
[1].predicate
) (reg
, op_mode1
)
4348 && op_mode1
!= VOIDmode
)
4349 reg
= copy_to_mode_reg (op_mode1
, reg
);
4351 insn
= GEN_FCN (icode
) (mem
, reg
);
4356 /* If the rhs is a function call and its value is not an aggregate,
4357 call the function before we start to compute the lhs.
4358 This is needed for correct code for cases such as
4359 val = setjmp (buf) on machines where reference to val
4360 requires loading up part of an address in a separate insn.
4362 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4363 since it might be a promoted variable where the zero- or sign- extension
4364 needs to be done. Handling this in the normal way is safe because no
4365 computation is done before the call. The same is true for SSA names. */
4366 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
4367 && COMPLETE_TYPE_P (TREE_TYPE (from
))
4368 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
4369 && ! (((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
4370 && REG_P (DECL_RTL (to
)))
4371 || TREE_CODE (to
) == SSA_NAME
))
4376 value
= expand_normal (from
);
4378 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4380 /* Handle calls that return values in multiple non-contiguous locations.
4381 The Irix 6 ABI has examples of this. */
4382 if (GET_CODE (to_rtx
) == PARALLEL
)
4383 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
4384 int_size_in_bytes (TREE_TYPE (from
)));
4385 else if (GET_MODE (to_rtx
) == BLKmode
)
4386 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
4389 if (POINTER_TYPE_P (TREE_TYPE (to
)))
4390 value
= convert_memory_address_addr_space
4391 (GET_MODE (to_rtx
), value
,
4392 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to
))));
4394 emit_move_insn (to_rtx
, value
);
4396 preserve_temp_slots (to_rtx
);
4402 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4403 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4406 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4408 /* Don't move directly into a return register. */
4409 if (TREE_CODE (to
) == RESULT_DECL
4410 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
4415 temp
= expand_expr (from
, NULL_RTX
, GET_MODE (to_rtx
), EXPAND_NORMAL
);
4417 if (GET_CODE (to_rtx
) == PARALLEL
)
4418 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
4419 int_size_in_bytes (TREE_TYPE (from
)));
4421 emit_move_insn (to_rtx
, temp
);
4423 preserve_temp_slots (to_rtx
);
4429 /* In case we are returning the contents of an object which overlaps
4430 the place the value is being stored, use a safe function when copying
4431 a value through a pointer into a structure value return block. */
4432 if (TREE_CODE (to
) == RESULT_DECL
4433 && TREE_CODE (from
) == INDIRECT_REF
4434 && ADDR_SPACE_GENERIC_P
4435 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from
, 0)))))
4436 && refs_may_alias_p (to
, from
)
4437 && cfun
->returns_struct
4438 && !cfun
->returns_pcc_struct
)
4443 size
= expr_size (from
);
4444 from_rtx
= expand_normal (from
);
4446 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
4447 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
4448 XEXP (from_rtx
, 0), Pmode
,
4449 convert_to_mode (TYPE_MODE (sizetype
),
4450 size
, TYPE_UNSIGNED (sizetype
)),
4451 TYPE_MODE (sizetype
));
4453 preserve_temp_slots (to_rtx
);
4459 /* Compute FROM and store the value in the rtx we got. */
4462 result
= store_expr (from
, to_rtx
, 0, nontemporal
);
4463 preserve_temp_slots (result
);
4469 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4470 succeeded, false otherwise. */
4473 emit_storent_insn (rtx to
, rtx from
)
4475 enum machine_mode mode
= GET_MODE (to
), imode
;
4476 enum insn_code code
= optab_handler (storent_optab
, mode
)->insn_code
;
4479 if (code
== CODE_FOR_nothing
)
4482 imode
= insn_data
[code
].operand
[0].mode
;
4483 if (!insn_data
[code
].operand
[0].predicate (to
, imode
))
4486 imode
= insn_data
[code
].operand
[1].mode
;
4487 if (!insn_data
[code
].operand
[1].predicate (from
, imode
))
4489 from
= copy_to_mode_reg (imode
, from
);
4490 if (!insn_data
[code
].operand
[1].predicate (from
, imode
))
4494 pattern
= GEN_FCN (code
) (to
, from
);
4495 if (pattern
== NULL_RTX
)
4498 emit_insn (pattern
);
4502 /* Generate code for computing expression EXP,
4503 and storing the value into TARGET.
4505 If the mode is BLKmode then we may return TARGET itself.
4506 It turns out that in BLKmode it doesn't cause a problem.
4507 because C has no operators that could combine two different
4508 assignments into the same BLKmode object with different values
4509 with no sequence point. Will other languages need this to
4512 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4513 stack, and block moves may need to be treated specially.
4515 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4518 store_expr (tree exp
, rtx target
, int call_param_p
, bool nontemporal
)
4521 rtx alt_rtl
= NULL_RTX
;
4522 location_t loc
= EXPR_LOCATION (exp
);
4524 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4526 /* C++ can generate ?: expressions with a throw expression in one
4527 branch and an rvalue in the other. Here, we resolve attempts to
4528 store the throw expression's nonexistent result. */
4529 gcc_assert (!call_param_p
);
4530 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4533 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4535 /* Perform first part of compound expression, then assign from second
4537 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4538 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4539 return store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
,
4542 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4544 /* For conditional expression, get safe form of the target. Then
4545 test the condition, doing the appropriate assignment on either
4546 side. This avoids the creation of unnecessary temporaries.
4547 For non-BLKmode, it is more efficient not to do this. */
4549 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4551 do_pending_stack_adjust ();
4553 jumpifnot (TREE_OPERAND (exp
, 0), lab1
, -1);
4554 store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
,
4556 emit_jump_insn (gen_jump (lab2
));
4559 store_expr (TREE_OPERAND (exp
, 2), target
, call_param_p
,
4566 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4567 /* If this is a scalar in a register that is stored in a wider mode
4568 than the declared mode, compute the result into its declared mode
4569 and then convert to the wider mode. Our value is the computed
4572 rtx inner_target
= 0;
4574 /* We can do the conversion inside EXP, which will often result
4575 in some optimizations. Do the conversion in two steps: first
4576 change the signedness, if needed, then the extend. But don't
4577 do this if the type of EXP is a subtype of something else
4578 since then the conversion might involve more than just
4579 converting modes. */
4580 if (INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4581 && TREE_TYPE (TREE_TYPE (exp
)) == 0
4582 && GET_MODE_PRECISION (GET_MODE (target
))
4583 == TYPE_PRECISION (TREE_TYPE (exp
)))
4585 if (TYPE_UNSIGNED (TREE_TYPE (exp
))
4586 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4588 /* Some types, e.g. Fortran's logical*4, won't have a signed
4589 version, so use the mode instead. */
4591 = (signed_or_unsigned_type_for
4592 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)));
4594 ntype
= lang_hooks
.types
.type_for_mode
4595 (TYPE_MODE (TREE_TYPE (exp
)),
4596 SUBREG_PROMOTED_UNSIGNED_P (target
));
4598 exp
= fold_convert_loc (loc
, ntype
, exp
);
4601 exp
= fold_convert_loc (loc
, lang_hooks
.types
.type_for_mode
4602 (GET_MODE (SUBREG_REG (target
)),
4603 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4606 inner_target
= SUBREG_REG (target
);
4609 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
4610 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4612 /* If TEMP is a VOIDmode constant, use convert_modes to make
4613 sure that we properly convert it. */
4614 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4616 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4617 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4618 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4619 GET_MODE (target
), temp
,
4620 SUBREG_PROMOTED_UNSIGNED_P (target
));
4623 convert_move (SUBREG_REG (target
), temp
,
4624 SUBREG_PROMOTED_UNSIGNED_P (target
));
4628 else if (TREE_CODE (exp
) == STRING_CST
4629 && !nontemporal
&& !call_param_p
4630 && TREE_STRING_LENGTH (exp
) > 0
4631 && TYPE_MODE (TREE_TYPE (exp
)) == BLKmode
)
4633 /* Optimize initialization of an array with a STRING_CST. */
4634 HOST_WIDE_INT exp_len
, str_copy_len
;
4637 exp_len
= int_expr_size (exp
);
4641 str_copy_len
= strlen (TREE_STRING_POINTER (exp
));
4642 if (str_copy_len
< TREE_STRING_LENGTH (exp
) - 1)
4645 str_copy_len
= TREE_STRING_LENGTH (exp
);
4646 if ((STORE_MAX_PIECES
& (STORE_MAX_PIECES
- 1)) == 0)
4648 str_copy_len
+= STORE_MAX_PIECES
- 1;
4649 str_copy_len
&= ~(STORE_MAX_PIECES
- 1);
4651 str_copy_len
= MIN (str_copy_len
, exp_len
);
4652 if (!can_store_by_pieces (str_copy_len
, builtin_strncpy_read_str
,
4653 CONST_CAST(char *, TREE_STRING_POINTER (exp
)),
4654 MEM_ALIGN (target
), false))
4659 dest_mem
= store_by_pieces (dest_mem
,
4660 str_copy_len
, builtin_strncpy_read_str
,
4661 CONST_CAST(char *, TREE_STRING_POINTER (exp
)),
4662 MEM_ALIGN (target
), false,
4663 exp_len
> str_copy_len
? 1 : 0);
4664 if (exp_len
> str_copy_len
)
4665 clear_storage (adjust_address (dest_mem
, BLKmode
, 0),
4666 GEN_INT (exp_len
- str_copy_len
),
4675 /* If we want to use a nontemporal store, force the value to
4677 tmp_target
= nontemporal
? NULL_RTX
: target
;
4678 temp
= expand_expr_real (exp
, tmp_target
, GET_MODE (target
),
4680 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
4684 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4685 the same as that of TARGET, adjust the constant. This is needed, for
4686 example, in case it is a CONST_DOUBLE and we want only a word-sized
4688 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4689 && TREE_CODE (exp
) != ERROR_MARK
4690 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4691 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4692 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
4694 /* If value was not generated in the target, store it there.
4695 Convert the value to TARGET's type first if necessary and emit the
4696 pending incrementations that have been queued when expanding EXP.
4697 Note that we cannot emit the whole queue blindly because this will
4698 effectively disable the POST_INC optimization later.
4700 If TEMP and TARGET compare equal according to rtx_equal_p, but
4701 one or both of them are volatile memory refs, we have to distinguish
4703 - expand_expr has used TARGET. In this case, we must not generate
4704 another copy. This can be detected by TARGET being equal according
4706 - expand_expr has not used TARGET - that means that the source just
4707 happens to have the same RTX form. Since temp will have been created
4708 by expand_expr, it will compare unequal according to == .
4709 We must generate a copy in this case, to reach the correct number
4710 of volatile memory references. */
4712 if ((! rtx_equal_p (temp
, target
)
4713 || (temp
!= target
&& (side_effects_p (temp
)
4714 || side_effects_p (target
))))
4715 && TREE_CODE (exp
) != ERROR_MARK
4716 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4717 but TARGET is not valid memory reference, TEMP will differ
4718 from TARGET although it is really the same location. */
4719 && !(alt_rtl
&& rtx_equal_p (alt_rtl
, target
))
4720 /* If there's nothing to copy, don't bother. Don't call
4721 expr_size unless necessary, because some front-ends (C++)
4722 expr_size-hook must not be given objects that are not
4723 supposed to be bit-copied or bit-initialized. */
4724 && expr_size (exp
) != const0_rtx
)
4726 if (GET_MODE (temp
) != GET_MODE (target
)
4727 && GET_MODE (temp
) != VOIDmode
)
4729 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
4730 if (GET_MODE (target
) == BLKmode
4731 || GET_MODE (temp
) == BLKmode
)
4732 emit_block_move (target
, temp
, expr_size (exp
),
4734 ? BLOCK_OP_CALL_PARM
4735 : BLOCK_OP_NORMAL
));
4737 convert_move (target
, temp
, unsignedp
);
4740 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4742 /* Handle copying a string constant into an array. The string
4743 constant may be shorter than the array. So copy just the string's
4744 actual length, and clear the rest. First get the size of the data
4745 type of the string, which is actually the size of the target. */
4746 rtx size
= expr_size (exp
);
4748 if (CONST_INT_P (size
)
4749 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4750 emit_block_move (target
, temp
, size
,
4752 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4755 enum machine_mode pointer_mode
4756 = targetm
.addr_space
.pointer_mode (MEM_ADDR_SPACE (target
));
4757 enum machine_mode address_mode
4758 = targetm
.addr_space
.address_mode (MEM_ADDR_SPACE (target
));
4760 /* Compute the size of the data to copy from the string. */
4762 = size_binop_loc (loc
, MIN_EXPR
,
4763 make_tree (sizetype
, size
),
4764 size_int (TREE_STRING_LENGTH (exp
)));
4766 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4768 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4771 /* Copy that much. */
4772 copy_size_rtx
= convert_to_mode (pointer_mode
, copy_size_rtx
,
4773 TYPE_UNSIGNED (sizetype
));
4774 emit_block_move (target
, temp
, copy_size_rtx
,
4776 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4778 /* Figure out how much is left in TARGET that we have to clear.
4779 Do all calculations in pointer_mode. */
4780 if (CONST_INT_P (copy_size_rtx
))
4782 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4783 target
= adjust_address (target
, BLKmode
,
4784 INTVAL (copy_size_rtx
));
4788 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4789 copy_size_rtx
, NULL_RTX
, 0,
4792 if (GET_MODE (copy_size_rtx
) != address_mode
)
4793 copy_size_rtx
= convert_to_mode (address_mode
,
4795 TYPE_UNSIGNED (sizetype
));
4797 target
= offset_address (target
, copy_size_rtx
,
4798 highest_pow2_factor (copy_size
));
4799 label
= gen_label_rtx ();
4800 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4801 GET_MODE (size
), 0, label
);
4804 if (size
!= const0_rtx
)
4805 clear_storage (target
, size
, BLOCK_OP_NORMAL
);
4811 /* Handle calls that return values in multiple non-contiguous locations.
4812 The Irix 6 ABI has examples of this. */
4813 else if (GET_CODE (target
) == PARALLEL
)
4814 emit_group_load (target
, temp
, TREE_TYPE (exp
),
4815 int_size_in_bytes (TREE_TYPE (exp
)));
4816 else if (GET_MODE (temp
) == BLKmode
)
4817 emit_block_move (target
, temp
, expr_size (exp
),
4819 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4820 else if (nontemporal
4821 && emit_storent_insn (target
, temp
))
4822 /* If we managed to emit a nontemporal store, there is nothing else to
4827 temp
= force_operand (temp
, target
);
4829 emit_move_insn (target
, temp
);
4836 /* Helper for categorize_ctor_elements. Identical interface. */
4839 categorize_ctor_elements_1 (const_tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
4840 HOST_WIDE_INT
*p_elt_count
,
4843 unsigned HOST_WIDE_INT idx
;
4844 HOST_WIDE_INT nz_elts
, elt_count
;
4845 tree value
, purpose
;
4847 /* Whether CTOR is a valid constant initializer, in accordance with what
4848 initializer_constant_valid_p does. If inferred from the constructor
4849 elements, true until proven otherwise. */
4850 bool const_from_elts_p
= constructor_static_from_elts_p (ctor
);
4851 bool const_p
= const_from_elts_p
? true : TREE_STATIC (ctor
);
4856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), idx
, purpose
, value
)
4858 HOST_WIDE_INT mult
= 1;
4860 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4862 tree lo_index
= TREE_OPERAND (purpose
, 0);
4863 tree hi_index
= TREE_OPERAND (purpose
, 1);
4865 if (host_integerp (lo_index
, 1) && host_integerp (hi_index
, 1))
4866 mult
= (tree_low_cst (hi_index
, 1)
4867 - tree_low_cst (lo_index
, 1) + 1);
4870 switch (TREE_CODE (value
))
4874 HOST_WIDE_INT nz
= 0, ic
= 0;
4877 = categorize_ctor_elements_1 (value
, &nz
, &ic
, p_must_clear
);
4879 nz_elts
+= mult
* nz
;
4880 elt_count
+= mult
* ic
;
4882 if (const_from_elts_p
&& const_p
)
4883 const_p
= const_elt_p
;
4890 if (!initializer_zerop (value
))
4896 nz_elts
+= mult
* TREE_STRING_LENGTH (value
);
4897 elt_count
+= mult
* TREE_STRING_LENGTH (value
);
4901 if (!initializer_zerop (TREE_REALPART (value
)))
4903 if (!initializer_zerop (TREE_IMAGPART (value
)))
4911 for (v
= TREE_VECTOR_CST_ELTS (value
); v
; v
= TREE_CHAIN (v
))
4913 if (!initializer_zerop (TREE_VALUE (v
)))
4922 HOST_WIDE_INT tc
= count_type_elements (TREE_TYPE (value
), true);
4925 nz_elts
+= mult
* tc
;
4926 elt_count
+= mult
* tc
;
4928 if (const_from_elts_p
&& const_p
)
4929 const_p
= initializer_constant_valid_p (value
, TREE_TYPE (value
))
4937 && (TREE_CODE (TREE_TYPE (ctor
)) == UNION_TYPE
4938 || TREE_CODE (TREE_TYPE (ctor
)) == QUAL_UNION_TYPE
))
4941 bool clear_this
= true;
4943 if (!VEC_empty (constructor_elt
, CONSTRUCTOR_ELTS (ctor
)))
4945 /* We don't expect more than one element of the union to be
4946 initialized. Not sure what we should do otherwise... */
4947 gcc_assert (VEC_length (constructor_elt
, CONSTRUCTOR_ELTS (ctor
))
4950 init_sub_type
= TREE_TYPE (VEC_index (constructor_elt
,
4951 CONSTRUCTOR_ELTS (ctor
),
4954 /* ??? We could look at each element of the union, and find the
4955 largest element. Which would avoid comparing the size of the
4956 initialized element against any tail padding in the union.
4957 Doesn't seem worth the effort... */
4958 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor
)),
4959 TYPE_SIZE (init_sub_type
)) == 1)
4961 /* And now we have to find out if the element itself is fully
4962 constructed. E.g. for union { struct { int a, b; } s; } u
4963 = { .s = { .a = 1 } }. */
4964 if (elt_count
== count_type_elements (init_sub_type
, false))
4969 *p_must_clear
= clear_this
;
4972 *p_nz_elts
+= nz_elts
;
4973 *p_elt_count
+= elt_count
;
4978 /* Examine CTOR to discover:
4979 * how many scalar fields are set to nonzero values,
4980 and place it in *P_NZ_ELTS;
4981 * how many scalar fields in total are in CTOR,
4982 and place it in *P_ELT_COUNT.
4983 * if a type is a union, and the initializer from the constructor
4984 is not the largest element in the union, then set *p_must_clear.
4986 Return whether or not CTOR is a valid static constant initializer, the same
4987 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4990 categorize_ctor_elements (const_tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
4991 HOST_WIDE_INT
*p_elt_count
,
4996 *p_must_clear
= false;
4999 categorize_ctor_elements_1 (ctor
, p_nz_elts
, p_elt_count
, p_must_clear
);
5002 /* Count the number of scalars in TYPE. Return -1 on overflow or
5003 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
5004 array member at the end of the structure. */
5007 count_type_elements (const_tree type
, bool allow_flexarr
)
5009 const HOST_WIDE_INT max
= ~((HOST_WIDE_INT
)1 << (HOST_BITS_PER_WIDE_INT
-1));
5010 switch (TREE_CODE (type
))
5014 tree telts
= array_type_nelts (type
);
5015 if (telts
&& host_integerp (telts
, 1))
5017 HOST_WIDE_INT n
= tree_low_cst (telts
, 1) + 1;
5018 HOST_WIDE_INT m
= count_type_elements (TREE_TYPE (type
), false);
5021 else if (max
/ n
> m
)
5029 HOST_WIDE_INT n
= 0, t
;
5032 for (f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
5033 if (TREE_CODE (f
) == FIELD_DECL
)
5035 t
= count_type_elements (TREE_TYPE (f
), false);
5038 /* Check for structures with flexible array member. */
5039 tree tf
= TREE_TYPE (f
);
5041 && TREE_CHAIN (f
) == NULL
5042 && TREE_CODE (tf
) == ARRAY_TYPE
5044 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf
))
5045 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf
)))
5046 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf
))
5047 && int_size_in_bytes (type
) >= 0)
5059 case QUAL_UNION_TYPE
:
5066 return TYPE_VECTOR_SUBPARTS (type
);
5070 case FIXED_POINT_TYPE
:
5075 case REFERENCE_TYPE
:
5090 /* Return 1 if EXP contains mostly (3/4) zeros. */
5093 mostly_zeros_p (const_tree exp
)
5095 if (TREE_CODE (exp
) == CONSTRUCTOR
)
5098 HOST_WIDE_INT nz_elts
, count
, elts
;
5101 categorize_ctor_elements (exp
, &nz_elts
, &count
, &must_clear
);
5105 elts
= count_type_elements (TREE_TYPE (exp
), false);
5107 return nz_elts
< elts
/ 4;
5110 return initializer_zerop (exp
);
5113 /* Return 1 if EXP contains all zeros. */
5116 all_zeros_p (const_tree exp
)
5118 if (TREE_CODE (exp
) == CONSTRUCTOR
)
5121 HOST_WIDE_INT nz_elts
, count
;
5124 categorize_ctor_elements (exp
, &nz_elts
, &count
, &must_clear
);
5125 return nz_elts
== 0;
5128 return initializer_zerop (exp
);
5131 /* Helper function for store_constructor.
5132 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5133 TYPE is the type of the CONSTRUCTOR, not the element type.
5134 CLEARED is as for store_constructor.
5135 ALIAS_SET is the alias set to use for any stores.
5137 This provides a recursive shortcut back to store_constructor when it isn't
5138 necessary to go through store_field. This is so that we can pass through
5139 the cleared field to let store_constructor know that we may not have to
5140 clear a substructure if the outer structure has already been cleared. */
5143 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
5144 HOST_WIDE_INT bitpos
, enum machine_mode mode
,
5145 tree exp
, tree type
, int cleared
,
5146 alias_set_type alias_set
)
5148 if (TREE_CODE (exp
) == CONSTRUCTOR
5149 /* We can only call store_constructor recursively if the size and
5150 bit position are on a byte boundary. */
5151 && bitpos
% BITS_PER_UNIT
== 0
5152 && (bitsize
> 0 && bitsize
% BITS_PER_UNIT
== 0)
5153 /* If we have a nonzero bitpos for a register target, then we just
5154 let store_field do the bitfield handling. This is unlikely to
5155 generate unnecessary clear instructions anyways. */
5156 && (bitpos
== 0 || MEM_P (target
)))
5160 = adjust_address (target
,
5161 GET_MODE (target
) == BLKmode
5163 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
5164 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5167 /* Update the alias set, if required. */
5168 if (MEM_P (target
) && ! MEM_KEEP_ALIAS_SET_P (target
)
5169 && MEM_ALIAS_SET (target
) != 0)
5171 target
= copy_rtx (target
);
5172 set_mem_alias_set (target
, alias_set
);
5175 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
5178 store_field (target
, bitsize
, bitpos
, mode
, exp
, type
, alias_set
, false);
5181 /* Store the value of constructor EXP into the rtx TARGET.
5182 TARGET is either a REG or a MEM; we know it cannot conflict, since
5183 safe_from_p has been called.
5184 CLEARED is true if TARGET is known to have been zero'd.
5185 SIZE is the number of bytes of TARGET we are allowed to modify: this
5186 may not be the same as the size of EXP if we are assigning to a field
5187 which has been packed to exclude padding bits. */
5190 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
5192 tree type
= TREE_TYPE (exp
);
5193 #ifdef WORD_REGISTER_OPERATIONS
5194 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
5197 switch (TREE_CODE (type
))
5201 case QUAL_UNION_TYPE
:
5203 unsigned HOST_WIDE_INT idx
;
5206 /* If size is zero or the target is already cleared, do nothing. */
5207 if (size
== 0 || cleared
)
5209 /* We either clear the aggregate or indicate the value is dead. */
5210 else if ((TREE_CODE (type
) == UNION_TYPE
5211 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
5212 && ! CONSTRUCTOR_ELTS (exp
))
5213 /* If the constructor is empty, clear the union. */
5215 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
5219 /* If we are building a static constructor into a register,
5220 set the initial value as zero so we can fold the value into
5221 a constant. But if more than one register is involved,
5222 this probably loses. */
5223 else if (REG_P (target
) && TREE_STATIC (exp
)
5224 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
5226 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5230 /* If the constructor has fewer fields than the structure or
5231 if we are initializing the structure to mostly zeros, clear
5232 the whole structure first. Don't do this if TARGET is a
5233 register whose mode size isn't equal to SIZE since
5234 clear_storage can't handle this case. */
5236 && (((int)VEC_length (constructor_elt
, CONSTRUCTOR_ELTS (exp
))
5237 != fields_length (type
))
5238 || mostly_zeros_p (exp
))
5240 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
5243 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
5247 if (REG_P (target
) && !cleared
)
5248 emit_clobber (target
);
5250 /* Store each element of the constructor into the
5251 corresponding field of TARGET. */
5252 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, field
, value
)
5254 enum machine_mode mode
;
5255 HOST_WIDE_INT bitsize
;
5256 HOST_WIDE_INT bitpos
= 0;
5258 rtx to_rtx
= target
;
5260 /* Just ignore missing fields. We cleared the whole
5261 structure, above, if any fields are missing. */
5265 if (cleared
&& initializer_zerop (value
))
5268 if (host_integerp (DECL_SIZE (field
), 1))
5269 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
5273 mode
= DECL_MODE (field
);
5274 if (DECL_BIT_FIELD (field
))
5277 offset
= DECL_FIELD_OFFSET (field
);
5278 if (host_integerp (offset
, 0)
5279 && host_integerp (bit_position (field
), 0))
5281 bitpos
= int_bit_position (field
);
5285 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
5289 enum machine_mode address_mode
;
5293 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
5294 make_tree (TREE_TYPE (exp
),
5297 offset_rtx
= expand_normal (offset
);
5298 gcc_assert (MEM_P (to_rtx
));
5301 = targetm
.addr_space
.address_mode (MEM_ADDR_SPACE (to_rtx
));
5302 if (GET_MODE (offset_rtx
) != address_mode
)
5303 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
5305 to_rtx
= offset_address (to_rtx
, offset_rtx
,
5306 highest_pow2_factor (offset
));
5309 #ifdef WORD_REGISTER_OPERATIONS
5310 /* If this initializes a field that is smaller than a
5311 word, at the start of a word, try to widen it to a full
5312 word. This special case allows us to output C++ member
5313 function initializations in a form that the optimizers
5316 && bitsize
< BITS_PER_WORD
5317 && bitpos
% BITS_PER_WORD
== 0
5318 && GET_MODE_CLASS (mode
) == MODE_INT
5319 && TREE_CODE (value
) == INTEGER_CST
5321 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
5323 tree type
= TREE_TYPE (value
);
5325 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
5327 type
= lang_hooks
.types
.type_for_size
5328 (BITS_PER_WORD
, TYPE_UNSIGNED (type
));
5329 value
= fold_convert (type
, value
);
5332 if (BYTES_BIG_ENDIAN
)
5334 = fold_build2 (LSHIFT_EXPR
, type
, value
,
5335 build_int_cst (type
,
5336 BITS_PER_WORD
- bitsize
));
5337 bitsize
= BITS_PER_WORD
;
5342 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
5343 && DECL_NONADDRESSABLE_P (field
))
5345 to_rtx
= copy_rtx (to_rtx
);
5346 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
5349 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
5350 value
, type
, cleared
,
5351 get_alias_set (TREE_TYPE (field
)));
5358 unsigned HOST_WIDE_INT i
;
5361 tree elttype
= TREE_TYPE (type
);
5363 HOST_WIDE_INT minelt
= 0;
5364 HOST_WIDE_INT maxelt
= 0;
5366 domain
= TYPE_DOMAIN (type
);
5367 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
5368 && TYPE_MAX_VALUE (domain
)
5369 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
5370 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
5372 /* If we have constant bounds for the range of the type, get them. */
5375 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
5376 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
5379 /* If the constructor has fewer elements than the array, clear
5380 the whole array first. Similarly if this is static
5381 constructor of a non-BLKmode object. */
5384 else if (REG_P (target
) && TREE_STATIC (exp
))
5388 unsigned HOST_WIDE_INT idx
;
5390 HOST_WIDE_INT count
= 0, zero_count
= 0;
5391 need_to_clear
= ! const_bounds_p
;
5393 /* This loop is a more accurate version of the loop in
5394 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5395 is also needed to check for missing elements. */
5396 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, index
, value
)
5398 HOST_WIDE_INT this_node_count
;
5403 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
5405 tree lo_index
= TREE_OPERAND (index
, 0);
5406 tree hi_index
= TREE_OPERAND (index
, 1);
5408 if (! host_integerp (lo_index
, 1)
5409 || ! host_integerp (hi_index
, 1))
5415 this_node_count
= (tree_low_cst (hi_index
, 1)
5416 - tree_low_cst (lo_index
, 1) + 1);
5419 this_node_count
= 1;
5421 count
+= this_node_count
;
5422 if (mostly_zeros_p (value
))
5423 zero_count
+= this_node_count
;
5426 /* Clear the entire array first if there are any missing
5427 elements, or if the incidence of zero elements is >=
5430 && (count
< maxelt
- minelt
+ 1
5431 || 4 * zero_count
>= 3 * count
))
5435 if (need_to_clear
&& size
> 0)
5438 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5440 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
5444 if (!cleared
&& REG_P (target
))
5445 /* Inform later passes that the old value is dead. */
5446 emit_clobber (target
);
5448 /* Store each element of the constructor into the
5449 corresponding element of TARGET, determined by counting the
5451 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), i
, index
, value
)
5453 enum machine_mode mode
;
5454 HOST_WIDE_INT bitsize
;
5455 HOST_WIDE_INT bitpos
;
5456 rtx xtarget
= target
;
5458 if (cleared
&& initializer_zerop (value
))
5461 mode
= TYPE_MODE (elttype
);
5462 if (mode
== BLKmode
)
5463 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
5464 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
5467 bitsize
= GET_MODE_BITSIZE (mode
);
5469 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
5471 tree lo_index
= TREE_OPERAND (index
, 0);
5472 tree hi_index
= TREE_OPERAND (index
, 1);
5473 rtx index_r
, pos_rtx
;
5474 HOST_WIDE_INT lo
, hi
, count
;
5477 /* If the range is constant and "small", unroll the loop. */
5479 && host_integerp (lo_index
, 0)
5480 && host_integerp (hi_index
, 0)
5481 && (lo
= tree_low_cst (lo_index
, 0),
5482 hi
= tree_low_cst (hi_index
, 0),
5483 count
= hi
- lo
+ 1,
5486 || (host_integerp (TYPE_SIZE (elttype
), 1)
5487 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
5490 lo
-= minelt
; hi
-= minelt
;
5491 for (; lo
<= hi
; lo
++)
5493 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
5496 && !MEM_KEEP_ALIAS_SET_P (target
)
5497 && TREE_CODE (type
) == ARRAY_TYPE
5498 && TYPE_NONALIASED_COMPONENT (type
))
5500 target
= copy_rtx (target
);
5501 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5504 store_constructor_field
5505 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
5506 get_alias_set (elttype
));
5511 rtx loop_start
= gen_label_rtx ();
5512 rtx loop_end
= gen_label_rtx ();
5515 expand_normal (hi_index
);
5517 index
= build_decl (EXPR_LOCATION (exp
),
5518 VAR_DECL
, NULL_TREE
, domain
);
5519 index_r
= gen_reg_rtx (promote_decl_mode (index
, NULL
));
5520 SET_DECL_RTL (index
, index_r
);
5521 store_expr (lo_index
, index_r
, 0, false);
5523 /* Build the head of the loop. */
5524 do_pending_stack_adjust ();
5525 emit_label (loop_start
);
5527 /* Assign value to element index. */
5529 fold_convert (ssizetype
,
5530 fold_build2 (MINUS_EXPR
,
5533 TYPE_MIN_VALUE (domain
)));
5536 size_binop (MULT_EXPR
, position
,
5537 fold_convert (ssizetype
,
5538 TYPE_SIZE_UNIT (elttype
)));
5540 pos_rtx
= expand_normal (position
);
5541 xtarget
= offset_address (target
, pos_rtx
,
5542 highest_pow2_factor (position
));
5543 xtarget
= adjust_address (xtarget
, mode
, 0);
5544 if (TREE_CODE (value
) == CONSTRUCTOR
)
5545 store_constructor (value
, xtarget
, cleared
,
5546 bitsize
/ BITS_PER_UNIT
);
5548 store_expr (value
, xtarget
, 0, false);
5550 /* Generate a conditional jump to exit the loop. */
5551 exit_cond
= build2 (LT_EXPR
, integer_type_node
,
5553 jumpif (exit_cond
, loop_end
, -1);
5555 /* Update the loop counter, and jump to the head of
5557 expand_assignment (index
,
5558 build2 (PLUS_EXPR
, TREE_TYPE (index
),
5559 index
, integer_one_node
),
5562 emit_jump (loop_start
);
5564 /* Build the end of the loop. */
5565 emit_label (loop_end
);
5568 else if ((index
!= 0 && ! host_integerp (index
, 0))
5569 || ! host_integerp (TYPE_SIZE (elttype
), 1))
5574 index
= ssize_int (1);
5577 index
= fold_convert (ssizetype
,
5578 fold_build2 (MINUS_EXPR
,
5581 TYPE_MIN_VALUE (domain
)));
5584 size_binop (MULT_EXPR
, index
,
5585 fold_convert (ssizetype
,
5586 TYPE_SIZE_UNIT (elttype
)));
5587 xtarget
= offset_address (target
,
5588 expand_normal (position
),
5589 highest_pow2_factor (position
));
5590 xtarget
= adjust_address (xtarget
, mode
, 0);
5591 store_expr (value
, xtarget
, 0, false);
5596 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
5597 * tree_low_cst (TYPE_SIZE (elttype
), 1));
5599 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
5601 if (MEM_P (target
) && !MEM_KEEP_ALIAS_SET_P (target
)
5602 && TREE_CODE (type
) == ARRAY_TYPE
5603 && TYPE_NONALIASED_COMPONENT (type
))
5605 target
= copy_rtx (target
);
5606 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5608 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
5609 type
, cleared
, get_alias_set (elttype
));
5617 unsigned HOST_WIDE_INT idx
;
5618 constructor_elt
*ce
;
5622 tree elttype
= TREE_TYPE (type
);
5623 int elt_size
= tree_low_cst (TYPE_SIZE (elttype
), 1);
5624 enum machine_mode eltmode
= TYPE_MODE (elttype
);
5625 HOST_WIDE_INT bitsize
;
5626 HOST_WIDE_INT bitpos
;
5627 rtvec vector
= NULL
;
5629 alias_set_type alias
;
5631 gcc_assert (eltmode
!= BLKmode
);
5633 n_elts
= TYPE_VECTOR_SUBPARTS (type
);
5634 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
5636 enum machine_mode mode
= GET_MODE (target
);
5638 icode
= (int) optab_handler (vec_init_optab
, mode
)->insn_code
;
5639 if (icode
!= CODE_FOR_nothing
)
5643 vector
= rtvec_alloc (n_elts
);
5644 for (i
= 0; i
< n_elts
; i
++)
5645 RTVEC_ELT (vector
, i
) = CONST0_RTX (GET_MODE_INNER (mode
));
5649 /* If the constructor has fewer elements than the vector,
5650 clear the whole array first. Similarly if this is static
5651 constructor of a non-BLKmode object. */
5654 else if (REG_P (target
) && TREE_STATIC (exp
))
5658 unsigned HOST_WIDE_INT count
= 0, zero_count
= 0;
5661 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
5663 int n_elts_here
= tree_low_cst
5664 (int_const_binop (TRUNC_DIV_EXPR
,
5665 TYPE_SIZE (TREE_TYPE (value
)),
5666 TYPE_SIZE (elttype
), 0), 1);
5668 count
+= n_elts_here
;
5669 if (mostly_zeros_p (value
))
5670 zero_count
+= n_elts_here
;
5673 /* Clear the entire vector first if there are any missing elements,
5674 or if the incidence of zero elements is >= 75%. */
5675 need_to_clear
= (count
< n_elts
|| 4 * zero_count
>= 3 * count
);
5678 if (need_to_clear
&& size
> 0 && !vector
)
5681 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5683 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
5687 /* Inform later passes that the old value is dead. */
5688 if (!cleared
&& !vector
&& REG_P (target
))
5689 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5692 alias
= MEM_ALIAS_SET (target
);
5694 alias
= get_alias_set (elttype
);
5696 /* Store each element of the constructor into the corresponding
5697 element of TARGET, determined by counting the elements. */
5698 for (idx
= 0, i
= 0;
5699 VEC_iterate (constructor_elt
, CONSTRUCTOR_ELTS (exp
), idx
, ce
);
5700 idx
++, i
+= bitsize
/ elt_size
)
5702 HOST_WIDE_INT eltpos
;
5703 tree value
= ce
->value
;
5705 bitsize
= tree_low_cst (TYPE_SIZE (TREE_TYPE (value
)), 1);
5706 if (cleared
&& initializer_zerop (value
))
5710 eltpos
= tree_low_cst (ce
->index
, 1);
5716 /* Vector CONSTRUCTORs should only be built from smaller
5717 vectors in the case of BLKmode vectors. */
5718 gcc_assert (TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
);
5719 RTVEC_ELT (vector
, eltpos
)
5720 = expand_normal (value
);
5724 enum machine_mode value_mode
=
5725 TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
5726 ? TYPE_MODE (TREE_TYPE (value
))
5728 bitpos
= eltpos
* elt_size
;
5729 store_constructor_field (target
, bitsize
, bitpos
,
5730 value_mode
, value
, type
,
5736 emit_insn (GEN_FCN (icode
)
5738 gen_rtx_PARALLEL (GET_MODE (target
), vector
)));
5747 /* Store the value of EXP (an expression tree)
5748 into a subfield of TARGET which has mode MODE and occupies
5749 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5750 If MODE is VOIDmode, it means that we are storing into a bit-field.
5752 Always return const0_rtx unless we have something particular to
5755 TYPE is the type of the underlying object,
5757 ALIAS_SET is the alias set for the destination. This value will
5758 (in general) be different from that for TARGET, since TARGET is a
5759 reference to the containing structure.
5761 If NONTEMPORAL is true, try generating a nontemporal store. */
5764 store_field (rtx target
, HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
5765 enum machine_mode mode
, tree exp
, tree type
,
5766 alias_set_type alias_set
, bool nontemporal
)
5768 if (TREE_CODE (exp
) == ERROR_MARK
)
5771 /* If we have nothing to store, do nothing unless the expression has
5774 return expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5776 /* If we are storing into an unaligned field of an aligned union that is
5777 in a register, we may have the mode of TARGET being an integer mode but
5778 MODE == BLKmode. In that case, get an aligned object whose size and
5779 alignment are the same as TARGET and store TARGET into it (we can avoid
5780 the store if the field being stored is the entire width of TARGET). Then
5781 call ourselves recursively to store the field into a BLKmode version of
5782 that object. Finally, load from the object into TARGET. This is not
5783 very efficient in general, but should only be slightly more expensive
5784 than the otherwise-required unaligned accesses. Perhaps this can be
5785 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5786 twice, once with emit_move_insn and once via store_field. */
5789 && (REG_P (target
) || GET_CODE (target
) == SUBREG
))
5791 rtx object
= assign_temp (type
, 0, 1, 1);
5792 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5794 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5795 emit_move_insn (object
, target
);
5797 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, type
, alias_set
,
5800 emit_move_insn (target
, object
);
5802 /* We want to return the BLKmode version of the data. */
5806 if (GET_CODE (target
) == CONCAT
)
5808 /* We're storing into a struct containing a single __complex. */
5810 gcc_assert (!bitpos
);
5811 return store_expr (exp
, target
, 0, nontemporal
);
5814 /* If the structure is in a register or if the component
5815 is a bit field, we cannot use addressing to access it.
5816 Use bit-field techniques or SUBREG to store in it. */
5818 if (mode
== VOIDmode
5819 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5820 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5821 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5823 || GET_CODE (target
) == SUBREG
5824 /* If the field isn't aligned enough to store as an ordinary memref,
5825 store it as a bit field. */
5827 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5828 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5829 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5830 || (bitpos
% BITS_PER_UNIT
!= 0)))
5831 /* If the RHS and field are a constant size and the size of the
5832 RHS isn't the same size as the bitfield, we must use bitfield
5835 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5836 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5841 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5842 implies a mask operation. If the precision is the same size as
5843 the field we're storing into, that mask is redundant. This is
5844 particularly common with bit field assignments generated by the
5846 nop_def
= get_def_for_expr (exp
, NOP_EXPR
);
5849 tree type
= TREE_TYPE (exp
);
5850 if (INTEGRAL_TYPE_P (type
)
5851 && TYPE_PRECISION (type
) < GET_MODE_BITSIZE (TYPE_MODE (type
))
5852 && bitsize
== TYPE_PRECISION (type
))
5854 tree op
= gimple_assign_rhs1 (nop_def
);
5855 type
= TREE_TYPE (op
);
5856 if (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) >= bitsize
)
5861 temp
= expand_normal (exp
);
5863 /* If BITSIZE is narrower than the size of the type of EXP
5864 we will be narrowing TEMP. Normally, what's wanted are the
5865 low-order bits. However, if EXP's type is a record and this is
5866 big-endian machine, we want the upper BITSIZE bits. */
5867 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5868 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5869 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5870 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5871 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5875 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5877 if (mode
!= VOIDmode
&& mode
!= BLKmode
5878 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5879 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5881 /* If the modes of TEMP and TARGET are both BLKmode, both
5882 must be in memory and BITPOS must be aligned on a byte
5883 boundary. If so, we simply do a block copy. Likewise
5884 for a BLKmode-like TARGET. */
5885 if (GET_MODE (temp
) == BLKmode
5886 && (GET_MODE (target
) == BLKmode
5888 && GET_MODE_CLASS (GET_MODE (target
)) == MODE_INT
5889 && (bitpos
% BITS_PER_UNIT
) == 0
5890 && (bitsize
% BITS_PER_UNIT
) == 0)))
5892 gcc_assert (MEM_P (target
) && MEM_P (temp
)
5893 && (bitpos
% BITS_PER_UNIT
) == 0);
5895 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5896 emit_block_move (target
, temp
,
5897 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5904 /* Store the value in the bitfield. */
5905 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
);
5911 /* Now build a reference to just the desired component. */
5912 rtx to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5914 if (to_rtx
== target
)
5915 to_rtx
= copy_rtx (to_rtx
);
5917 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5918 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5919 set_mem_alias_set (to_rtx
, alias_set
);
5921 return store_expr (exp
, to_rtx
, 0, nontemporal
);
5925 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5926 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5927 codes and find the ultimate containing object, which we return.
5929 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5930 bit position, and *PUNSIGNEDP to the signedness of the field.
5931 If the position of the field is variable, we store a tree
5932 giving the variable offset (in units) in *POFFSET.
5933 This offset is in addition to the bit position.
5934 If the position is not variable, we store 0 in *POFFSET.
5936 If any of the extraction expressions is volatile,
5937 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5939 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5940 Otherwise, it is a mode that can be used to access the field.
5942 If the field describes a variable-sized object, *PMODE is set to
5943 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5944 this case, but the address of the object can be found.
5946 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5947 look through nodes that serve as markers of a greater alignment than
5948 the one that can be deduced from the expression. These nodes make it
5949 possible for front-ends to prevent temporaries from being created by
5950 the middle-end on alignment considerations. For that purpose, the
5951 normal operating mode at high-level is to always pass FALSE so that
5952 the ultimate containing object is really returned; moreover, the
5953 associated predicate handled_component_p will always return TRUE
5954 on these nodes, thus indicating that they are essentially handled
5955 by get_inner_reference. TRUE should only be passed when the caller
5956 is scanning the expression in order to build another representation
5957 and specifically knows how to handle these nodes; as such, this is
5958 the normal operating mode in the RTL expanders. */
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
                     HOST_WIDE_INT *pbitpos, tree *poffset,
                     enum machine_mode *pmode, int *punsignedp,
                     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (!DECL_BIT_FIELD (field))
        mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
        blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
                     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
         inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
          && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
        mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
        *pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                   TREE_OPERAND (exp, 2));
          break;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);

            /* If this field hasn't been filled in yet, don't go past it.
               This should only happen when folding expressions made during
               type construction.  */
            if (this_offset == 0)
              break;

            offset = size_binop (PLUS_EXPR, offset, this_offset);
            bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                     DECL_FIELD_BIT_OFFSET (field));

            /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound = array_ref_low_bound (exp);
            tree unit_size = array_ref_element_size (exp);

            /* We assume all arrays have sizes that are a multiple of a byte.
               First subtract the lower bound, if any, in the type of the
               index, then convert to sizetype and multiply by the size of
               the array element.  */
            if (! integer_zerop (low_bound))
              index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                   index, low_bound);

            offset = size_binop (PLUS_EXPR, offset,
                                 size_binop (MULT_EXPR,
                                             fold_convert (sizetype, index),
                                             unit_size));
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                   bitsize_int (*pbitsize));
          break;

        case VIEW_CONVERT_EXPR:
          if (keep_aligning && STRICT_ALIGNMENT
              && (TYPE_ALIGN (TREE_TYPE (exp))
                  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  < BIGGEST_ALIGNMENT)
              && (TYPE_ALIGN_OK (TREE_TYPE (exp))
                  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
            goto done;
          break;

        default:
          goto done;
        }

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (host_integerp (offset, 0))
    {
      double_int tem = double_int_mul (tree_to_double_int (offset),
                                       uhwi_to_double_int (BITS_PER_UNIT));
      tem = double_int_add (tem, tree_to_double_int (bit_offset));
      if (double_int_fits_in_shwi_p (tem))
        {
          *pbitpos = double_int_to_shwi (tem);
          *poffset = offset = NULL_TREE;
        }
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      *pbitpos = tree_low_cst (bit_offset, 0);
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
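
/* A minimal usage sketch of get_inner_reference (kept out of the build;
   the helper name below is hypothetical, not something this file defines):
   given a handled component reference REF, recover the base object and,
   when the displacement is constant, its byte position.  OFFSET comes back
   null exactly when BITPOS alone describes the displacement.  */
#if 0
static tree
example_reference_base (tree ref, HOST_WIDE_INT *byte_pos)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                   &mode, &unsignedp, &volatilep, false);

  /* Only the constant, byte-aligned case is handled in this sketch.  */
  if (offset == NULL_TREE && (bitpos % BITS_PER_UNIT) == 0)
    *byte_pos = bitpos / BITS_PER_UNIT;
  else
    *byte_pos = -1;
  return base;
}
#endif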
/* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
   ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
   EXP is marked as PACKED.  */

bool
contains_packed_reference (const_tree exp)
{
  bool packed_p = false;

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            packed_p = DECL_PACKED (field)
                       || TYPE_PACKED (TREE_TYPE (field))
                       || TYPE_PACKED (TREE_TYPE (exp));
            if (packed_p)
              goto done;
          }
          break;

        case BIT_FIELD_REF:
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;

        default:
          goto done;
        }
      exp = TREE_OPERAND (exp, 0);
    }
 done:
  return packed_p;
}
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
        aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
                             size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
        aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
                             size_int (DECL_OFFSET_ALIGN (field)
                                       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
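
/* Sketch of how the array accessors above combine (illustrative only; the
   function name is hypothetical): the byte offset of an ARRAY_REF element
   I is (I - low_bound) * element_size, computed in sizetype, exactly as
   get_inner_reference does it.  */
#if 0
static tree
example_array_ref_byte_offset (tree array_ref)
{
  tree index = TREE_OPERAND (array_ref, 1);
  tree low_bound = array_ref_low_bound (array_ref);
  tree unit_size = array_ref_element_size (array_ref);

  if (! integer_zerop (low_bound))
    index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low_bound);

  return size_binop (MULT_EXPR, fold_convert (sizetype, index), unit_size);
}
#endif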
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
        = simplify_gen_subreg (GET_MODE (value),
                               force_reg (GET_MODE (SUBREG_REG (value)),
                                          force_operand (SUBREG_REG (value),
                                                         NULL_RTX)),
                               GET_MODE (SUBREG_REG (value)),
                               SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
        subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
        subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
        {
          code = PLUS;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
          && GET_CODE (XEXP (value, 0)) == PLUS
          && REG_P (XEXP (XEXP (value, 0), 0))
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_simple_binop (GET_MODE (value), code,
                                          XEXP (XEXP (value, 0), 0), op2,
                                          subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_simple_binop (GET_MODE (value), code, temp,
                                      force_operand (XEXP (XEXP (value,
                                                                 0), 1), 0),
                                      target, 0, OPTAB_LIB_WIDEN);
        }

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
        {
        case MULT:
          return expand_mult (GET_MODE (value), op1, op2, target, 1);
        case DIV:
          if (!INTEGRAL_MODE_P (GET_MODE (value)))
            return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                        target, 1, OPTAB_LIB_WIDEN);
          else
            return expand_divmod (0,
                                  FLOAT_MODE_P (GET_MODE (value))
                                  ? RDIV_EXPR : TRUNC_DIV_EXPR,
                                  GET_MODE (value), op1, op2, target, 0);
        case MOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 0);
        case UDIV:
          return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case UMOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case ASHIFTRT:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 0, OPTAB_LIB_WIDEN);
        default:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 1, OPTAB_LIB_WIDEN);
        }
    }
  if (UNARY_P (value))
    {
      if (!target)
        target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
        {
        case ZERO_EXTEND:
        case SIGN_EXTEND:
        case TRUNCATE:
        case FLOAT_EXTEND:
        case FLOAT_TRUNCATE:
          convert_move (target, op1, code == ZERO_EXTEND);
          return target;

        case FIX:
        case UNSIGNED_FIX:
          expand_fix (target, op1, code == UNSIGNED_FIX);
          return target;

        case FLOAT:
        case UNSIGNED_FLOAT:
          expand_float (target, op1, code == UNSIGNED_FLOAT);
          return target;

        default:
          return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
        }
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
                             force_reg (GET_MODE (SUBREG_REG (value)),
                                        force_operand (SUBREG_REG (value),
                                                       NULL_RTX)),
                             GET_MODE (SUBREG_REG (value)),
                             SUBREG_BYTE (value));
#endif

  return value;
}
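
/* Usage sketch for force_operand (illustrative; the wrapper below is
   hypothetical): an address-style (PLUS (MULT ...) ...) rtx may not be a
   legitimate operand, so the arithmetic is emitted as insns and a pseudo
   holding the value is returned instead.  */
#if 0
static rtx
example_force_index_address (rtx base, rtx index, HOST_WIDE_INT scale)
{
  rtx addr = gen_rtx_PLUS (Pmode, base,
                           gen_rtx_MULT (Pmode, index, GEN_INT (scale)));
  return force_operand (addr, NULL_RTX);
}
#endif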
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
              != INTEGER_CST)
          && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
        {
          while (1)
            {
              if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
                return 0;
              exp = TREE_CHAIN (exp);
              if (!exp)
                return 1;
              if (TREE_CODE (exp) != TREE_LIST)
                return safe_from_p (x, exp, 0);
            }
        }
      else if (TREE_CODE (exp) == CONSTRUCTOR)
        {
          constructor_elt *ce;
          unsigned HOST_WIDE_INT idx;

          for (idx = 0;
               VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
               idx++)
            if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
                || !safe_from_p (x, ce->value, 0))
              return 0;
          return 1;
        }
      else if (TREE_CODE (exp) == ERROR_MARK)
        return 1;       /* An already-visited SAVE_EXPR? */
      else
        return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
         DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
              || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
              || !DECL_INITIAL (DECL_EXPR_DECL (exp))
              || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
        return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          /* If the operand is static or we are static, we can't conflict.
             Likewise if we don't conflict with the operand at all.  */
          if (staticp (TREE_OPERAND (exp, 0))
              || TREE_STATIC (exp)
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
            return 1;

          /* Otherwise, the only way this can conflict is if we are taking
             the address of a DECL whose address is part of X, which is
             very rare.  */
          exp = TREE_OPERAND (exp, 0);
          if (DECL_P (exp))
            {
              if (!DECL_RTL_SET_P (exp)
                  || !MEM_P (DECL_RTL (exp)))
                return 0;
              else
                exp_rtl = XEXP (DECL_RTL (exp), 0);
            }
          break;

        case MISALIGNED_INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          if (MEM_P (x)
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
                                        get_alias_set (exp)))
            return 0;
          break;

        case CALL_EXPR:
          /* Assume that the call will clobber all hard registers and
             all of memory.  */
          if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
              || MEM_P (x))
            return 0;
          break;

        case WITH_CLEANUP_EXPR:
        case CLEANUP_POINT_EXPR:
          /* Lowered by gimplify.c.  */
          gcc_unreachable ();

        case SAVE_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

        default:
          break;
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
          return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (REG_P (exp_rtl)
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (MEM_P (x) && MEM_P (exp_rtl)
                    && true_dependence (exp_rtl, VOIDmode, x,
                                        rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
      if (TREE_OVERFLOW (exp))
        return BIGGEST_ALIGNMENT;
      else
        {
          /* Note: tree_low_cst is intentionally not used here,
             we don't care about the upper bits.  */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;
          return c0 ? c0 : BIGGEST_ALIGNMENT;
        }
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
          && host_integerp (TREE_OPERAND (exp, 1), 1))
        {
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
          return MAX (1, c0 / c1);
        }
      break;

    case BIT_AND_EXPR:
      /* The highest power of two of a bit-and expression is the maximum of
         that of its operands.  We typically get here for a complex LHS and
         a constant negative power of two on the RHS to force an explicit
         alignment, so don't bother looking at the LHS.  */
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    CASE_CONVERT:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
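
/* Worked example of the factor computation above (illustrative only; the
   checks restate the INTEGER_CST and PLUS_EXPR cases): 24 = 8 * 3, so its
   largest power-of-two factor is 8, and a sum is only as aligned as its
   least-aligned term.  */
#if 0
static void
example_highest_pow2_factor_checks (void)
{
  tree c24 = build_int_cst (sizetype, 24);
  tree c4 = build_int_cst (sizetype, 4);
  tree sum = size_binop (PLUS_EXPR, c24, c4);

  gcc_assert (highest_pow2_factor (c24) == 8);
  gcc_assert (highest_pow2_factor (sum) == 4);
}
#endif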
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Return &VAR expression for emulated thread local VAR.  */

static tree
emutls_var_address (tree var)
{
  tree emuvar = emutls_decl (var);
  tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
  tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
  tree arglist = build_tree_list (NULL_TREE, arg);
  tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
  return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
                 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
         temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
        exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
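
/* Usage sketch for expand_operands (the wrapper is hypothetical): a typical
   binary-operation expansion asks for both operands at once so that the
   safe_from_p and evaluation-order logic stays in one place, then hands the
   two rtxes to expand_binop.  */
#if 0
static rtx
example_expand_binary (enum machine_mode mode, optab binoptab,
                       tree treeop0, tree treeop1, rtx target)
{
  rtx op0, op1;
  rtx subtarget = get_subtarget (target);

  expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
  return expand_binop (mode, binoptab, op0, op1, target, 0, OPTAB_LIB_WIDEN);
}
#endif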
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
                         enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    return XEXP (expand_expr_constant (exp, 0, modifier), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
         the address is the same as the address of the parent object.  */
      bitpos = 0;
      offset = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
         The expression is therefore always offset by the size of the
         scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case VAR_DECL:
      /* TLS emulation hook - replace __thread VAR's &VAR with
         __emutls_get_address (&_emutls.VAR).  */
      if (! targetm.have_tls
          && TREE_CODE (exp) == VAR_DECL
          && DECL_THREAD_LOCAL_P (exp))
        {
          exp = emutls_var_address (exp);
          return expand_expr (exp, target, tmode, modifier);
        }
      /* Fall through.  */

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
         expand_expr, as that can have various side effects; LABEL_DECLs for
         example, may not have their DECL_RTL set yet.  Expand the rtl of
         CONSTRUCTORs too, which should yield a memory reference for the
         constructor's contents.  Assume language specific tree nodes can
         be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
          || TREE_CODE (exp) == CONSTRUCTOR
          || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
        {
          result = expand_expr (exp, target, tmode,
                                modifier == EXPAND_INITIALIZER
                                ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

          /* If the DECL isn't in memory, then the DECL wasn't properly
             marked TREE_ADDRESSABLE, which will be either a front-end
             or a tree optimizer bug.  */
          gcc_assert (MEM_P (result));
          result = XEXP (result, 0);

          /* ??? Is this needed anymore? */
          if (DECL_P (exp) && !TREE_USED (exp) == 0)
            {
              assemble_external (exp);
              TREE_USED (exp) = 1;
            }

          if (modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_CONST_ADDRESS)
            result = force_operand (result, target);
          return result;
        }

      /* Pass FALSE as the last argument to get_inner_reference although
         we are expanding to RTL.  The rationale is that we know how to
         handle "aligning nodes" here: we can just bypass them because
         they won't change the final object whose address will be returned
         (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
        result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
                         modifier == EXPAND_INITIALIZER
                         ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
        result = gen_rtx_PLUS (tmode, result, tmp);
      else
        {
          subtarget = bitpos ? NULL_RTX : target;
          result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
                                        1, OPTAB_LIB_WIDEN);
        }
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
         of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
        result = force_operand (result, target);
    }

  return result;
}
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
                       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  enum machine_mode address_mode = Pmode;
  enum machine_mode pointer_mode = ptr_mode;
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
                                    tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
                    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  enum machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT or CONST_DOUBLE when we
     fold.  Likewise, if we have a target we can use, it is best to
     store directly into the target unless the type is large enough
     that memcpy will be used.  If we are making an initializer and
     all operands are constant, put it in memory as well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
            && ! (target != 0 && safe_from_p (target, exp, 1)))
           || TREE_ADDRESSABLE (exp)
           || (host_integerp (TYPE_SIZE_UNIT (type), 1)
               && (! MOVE_BY_PIECES_P
                   (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
                    TYPE_ALIGN (type)))
               && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
          && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
        return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM)
        constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
        return NULL_RTX;

      target
        = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
                                                    | (TREE_READONLY (exp)
                                                       * TYPE_QUAL_CONST))),
                       0, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp));
  return target;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   Intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
                  enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      location_t saved_curr_loc = get_curr_insn_source_location ();
      tree saved_block = get_curr_insn_block ();
      input_location = EXPR_LOCATION (exp);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
      set_curr_insn_block (saved_block);
      set_curr_insn_source_location (saved_curr_loc);
    }
  else
    {
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
    }

  return ret;
}
rtx
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
                    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  gimple subexp0_def, subexp1_def;
  tree top0, top1;
  location_t loc = ops->location;
  tree treeop0, treeop1;
#define REDUCE_BIT_FIELD(expr)  (reduce_bit_field                         \
                                 ? reduce_to_bit_field_precision ((expr), \
                                                                  target, \
                                                                  type)   \
                                 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
              || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);

  ignore = (target == const0_rtx
            || ((CONVERT_EXPR_CODE_P (code)
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
                      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
7261 case NON_LVALUE_EXPR
:
7264 if (treeop0
== error_mark_node
)
7267 if (TREE_CODE (type
) == UNION_TYPE
)
7269 tree valtype
= TREE_TYPE (treeop0
);
7271 /* If both input and output are BLKmode, this conversion isn't doing
7272 anything except possibly changing memory attribute. */
7273 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7275 rtx result
= expand_expr (treeop0
, target
, tmode
,
7278 result
= copy_rtx (result
);
7279 set_mem_attributes (result
, type
, 0);
7285 if (TYPE_MODE (type
) != BLKmode
)
7286 target
= gen_reg_rtx (TYPE_MODE (type
));
7288 target
= assign_temp (type
, 0, 1, 1);
7292 /* Store data into beginning of memory target. */
7293 store_expr (treeop0
,
7294 adjust_address (target
, TYPE_MODE (valtype
), 0),
7295 modifier
== EXPAND_STACK_PARM
,
7300 gcc_assert (REG_P (target
));
7302 /* Store this field into a union of the proper type. */
7303 store_field (target
,
7304 MIN ((int_size_in_bytes (TREE_TYPE
7307 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7308 0, TYPE_MODE (valtype
), treeop0
,
7312 /* Return the entire union. */
7316 if (mode
== TYPE_MODE (TREE_TYPE (treeop0
)))
7318 op0
= expand_expr (treeop0
, target
, VOIDmode
,
7321 /* If the signedness of the conversion differs and OP0 is
7322 a promoted SUBREG, clear that indication since we now
7323 have to do the proper extension. */
7324 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)) != unsignedp
7325 && GET_CODE (op0
) == SUBREG
)
7326 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7328 return REDUCE_BIT_FIELD (op0
);
7331 op0
= expand_expr (treeop0
, NULL_RTX
, mode
,
7332 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
);
7333 if (GET_MODE (op0
) == mode
)
7336 /* If OP0 is a constant, just convert it into the proper mode. */
7337 else if (CONSTANT_P (op0
))
7339 tree inner_type
= TREE_TYPE (treeop0
);
7340 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7342 if (modifier
== EXPAND_INITIALIZER
)
7343 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
7344 subreg_lowpart_offset (mode
,
7347 op0
= convert_modes (mode
, inner_mode
, op0
,
7348 TYPE_UNSIGNED (inner_type
));
7351 else if (modifier
== EXPAND_INITIALIZER
)
7352 op0
= gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7354 else if (target
== 0)
7355 op0
= convert_to_mode (mode
, op0
,
7356 TYPE_UNSIGNED (TREE_TYPE
7360 convert_move (target
, op0
,
7361 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
7365 return REDUCE_BIT_FIELD (op0
);
7367 case ADDR_SPACE_CONVERT_EXPR
:
7369 tree treeop0_type
= TREE_TYPE (treeop0
);
7371 addr_space_t as_from
;
7373 gcc_assert (POINTER_TYPE_P (type
));
7374 gcc_assert (POINTER_TYPE_P (treeop0_type
));
7376 as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
7377 as_from
= TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type
));
7379 /* Conversions between pointers to the same address space should
7380 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7381 gcc_assert (as_to
!= as_from
);
7383 /* Ask target code to handle conversion between pointers
7384 to overlapping address spaces. */
7385 if (targetm
.addr_space
.subset_p (as_to
, as_from
)
7386 || targetm
.addr_space
.subset_p (as_from
, as_to
))
7388 op0
= expand_expr (treeop0
, NULL_RTX
, VOIDmode
, modifier
);
7389 op0
= targetm
.addr_space
.convert (op0
, treeop0_type
, type
);
7394 /* For disjoint address spaces, converting anything but
7395 a null pointer invokes undefined behaviour. We simply
7396 always return a null pointer here. */
7397 return CONST0_RTX (mode
);
7400 case POINTER_PLUS_EXPR
:
7401 /* Even though the sizetype mode and the pointer's mode can be different
7402 expand is able to handle this correctly and get the correct result out
7403 of the PLUS_EXPR code. */
7404 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7405 if sizetype precision is smaller than pointer precision. */
7406 if (TYPE_PRECISION (sizetype
) < TYPE_PRECISION (type
))
7407 treeop1
= fold_convert_loc (loc
, type
,
7408 fold_convert_loc (loc
, ssizetype
,
7412 /* Check if this is a case for multiplication and addition. */
7413 if ((TREE_CODE (type
) == INTEGER_TYPE
7414 || TREE_CODE (type
) == FIXED_POINT_TYPE
)
7415 && (subexp0_def
= get_def_for_expr (treeop0
,
7418 tree subsubexp0
, subsubexp1
;
7419 gimple subsubexp0_def
, subsubexp1_def
;
7420 enum tree_code this_code
;
7422 this_code
= TREE_CODE (type
) == INTEGER_TYPE
? NOP_EXPR
7423 : FIXED_CONVERT_EXPR
;
7424 subsubexp0
= gimple_assign_rhs1 (subexp0_def
);
7425 subsubexp0_def
= get_def_for_expr (subsubexp0
, this_code
);
7426 subsubexp1
= gimple_assign_rhs2 (subexp0_def
);
7427 subsubexp1_def
= get_def_for_expr (subsubexp1
, this_code
);
7428 if (subsubexp0_def
&& subsubexp1_def
7429 && (top0
= gimple_assign_rhs1 (subsubexp0_def
))
7430 && (top1
= gimple_assign_rhs1 (subsubexp1_def
))
7431 && (TYPE_PRECISION (TREE_TYPE (top0
))
7432 < TYPE_PRECISION (TREE_TYPE (subsubexp0
)))
7433 && (TYPE_PRECISION (TREE_TYPE (top0
))
7434 == TYPE_PRECISION (TREE_TYPE (top1
)))
7435 && (TYPE_UNSIGNED (TREE_TYPE (top0
))
7436 == TYPE_UNSIGNED (TREE_TYPE (top1
))))
7438 tree op0type
= TREE_TYPE (top0
);
7439 enum machine_mode innermode
= TYPE_MODE (op0type
);
7440 bool zextend_p
= TYPE_UNSIGNED (op0type
);
7441 bool sat_p
= TYPE_SATURATING (TREE_TYPE (subsubexp0
));
7443 this_optab
= zextend_p
? umadd_widen_optab
: smadd_widen_optab
;
7445 this_optab
= zextend_p
? usmadd_widen_optab
7446 : ssmadd_widen_optab
;
7447 if (mode
== GET_MODE_2XWIDER_MODE (innermode
)
7448 && (optab_handler (this_optab
, mode
)->insn_code
7449 != CODE_FOR_nothing
))
7451 expand_operands (top0
, top1
, NULL_RTX
, &op0
, &op1
,
7453 op2
= expand_expr (treeop1
, subtarget
,
7454 VOIDmode
, EXPAND_NORMAL
);
7455 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
7458 return REDUCE_BIT_FIELD (temp
);
7463 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7464 something else, make sure we add the register to the constant and
7465 then to the other thing. This case can occur during strength
7466 reduction and doing it this way will produce better code if the
7467 frame pointer or argument pointer is eliminated.
7469 fold-const.c will ensure that the constant is always in the inner
7470 PLUS_EXPR, so the only case we need to do anything about is if
7471 sp, ap, or fp is our second argument, in which case we must swap
7472 the innermost first argument and our second argument. */
7474 if (TREE_CODE (treeop0
) == PLUS_EXPR
7475 && TREE_CODE (TREE_OPERAND (treeop0
, 1)) == INTEGER_CST
7476 && TREE_CODE (treeop1
) == VAR_DECL
7477 && (DECL_RTL (treeop1
) == frame_pointer_rtx
7478 || DECL_RTL (treeop1
) == stack_pointer_rtx
7479 || DECL_RTL (treeop1
) == arg_pointer_rtx
))
7483 treeop1
= TREE_OPERAND (treeop0
, 0);
7484 TREE_OPERAND (treeop0
, 0) = t
;
7487 /* If the result is to be ptr_mode and we are adding an integer to
7488 something, we might be forming a constant. So try to use
7489 plus_constant. If it produces a sum and we can't accept it,
7490 use force_operand. This allows P = &ARR[const] to generate
7491 efficient code on machines where a SYMBOL_REF is not a valid
7494 If this is an EXPAND_SUM call, always return the sum. */
7495 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7496 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7498 if (modifier
== EXPAND_STACK_PARM
)
7500 if (TREE_CODE (treeop0
) == INTEGER_CST
7501 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7502 && TREE_CONSTANT (treeop1
))
7506 op1
= expand_expr (treeop1
, subtarget
, VOIDmode
,
7508 /* Use immed_double_const to ensure that the constant is
7509 truncated according to the mode of OP1, then sign extended
7510 to a HOST_WIDE_INT. Using the constant directly can result
7511 in non-canonical RTL in a 64x32 cross compile. */
7513 = immed_double_const (TREE_INT_CST_LOW (treeop0
),
7515 TYPE_MODE (TREE_TYPE (treeop1
)));
7516 op1
= plus_constant (op1
, INTVAL (constant_part
));
7517 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7518 op1
= force_operand (op1
, target
);
7519 return REDUCE_BIT_FIELD (op1
);
7522 else if (TREE_CODE (treeop1
) == INTEGER_CST
7523 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7524 && TREE_CONSTANT (treeop0
))
7528 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
7529 (modifier
== EXPAND_INITIALIZER
7530 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7531 if (! CONSTANT_P (op0
))
7533 op1
= expand_expr (treeop1
, NULL_RTX
,
7534 VOIDmode
, modifier
);
7535 /* Return a PLUS if modifier says it's OK. */
7536 if (modifier
== EXPAND_SUM
7537 || modifier
== EXPAND_INITIALIZER
)
7538 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7541 /* Use immed_double_const to ensure that the constant is
7542 truncated according to the mode of OP1, then sign extended
7543 to a HOST_WIDE_INT. Using the constant directly can result
7544 in non-canonical RTL in a 64x32 cross compile. */
7546 = immed_double_const (TREE_INT_CST_LOW (treeop1
),
7548 TYPE_MODE (TREE_TYPE (treeop0
)));
7549 op0
= plus_constant (op0
, INTVAL (constant_part
));
7550 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7551 op0
= force_operand (op0
, target
);
7552 return REDUCE_BIT_FIELD (op0
);
7556 /* No sense saving up arithmetic to be done
7557 if it's all in the wrong mode to form part of an address.
7558 And force_operand won't know whether to sign-extend or
7560 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7561 || mode
!= ptr_mode
)
7563 expand_operands (treeop0
, treeop1
,
7564 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
7565 if (op0
== const0_rtx
)
7567 if (op1
== const0_rtx
)
7572 expand_operands (treeop0
, treeop1
,
7573 subtarget
, &op0
, &op1
, modifier
);
7574 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
7577 /* Check if this is a case for multiplication and subtraction. */
7578 if ((TREE_CODE (type
) == INTEGER_TYPE
7579 || TREE_CODE (type
) == FIXED_POINT_TYPE
)
7580 && (subexp1_def
= get_def_for_expr (treeop1
,
7583 tree subsubexp0
, subsubexp1
;
7584 gimple subsubexp0_def
, subsubexp1_def
;
7585 enum tree_code this_code
;
7587 this_code
= TREE_CODE (type
) == INTEGER_TYPE
? NOP_EXPR
7588 : FIXED_CONVERT_EXPR
;
7589 subsubexp0
= gimple_assign_rhs1 (subexp1_def
);
7590 subsubexp0_def
= get_def_for_expr (subsubexp0
, this_code
);
7591 subsubexp1
= gimple_assign_rhs2 (subexp1_def
);
7592 subsubexp1_def
= get_def_for_expr (subsubexp1
, this_code
);
7593 if (subsubexp0_def
&& subsubexp1_def
7594 && (top0
= gimple_assign_rhs1 (subsubexp0_def
))
7595 && (top1
= gimple_assign_rhs1 (subsubexp1_def
))
7596 && (TYPE_PRECISION (TREE_TYPE (top0
))
7597 < TYPE_PRECISION (TREE_TYPE (subsubexp0
)))
7598 && (TYPE_PRECISION (TREE_TYPE (top0
))
7599 == TYPE_PRECISION (TREE_TYPE (top1
)))
7600 && (TYPE_UNSIGNED (TREE_TYPE (top0
))
7601 == TYPE_UNSIGNED (TREE_TYPE (top1
))))
7603 tree op0type
= TREE_TYPE (top0
);
7604 enum machine_mode innermode
= TYPE_MODE (op0type
);
7605 bool zextend_p
= TYPE_UNSIGNED (op0type
);
7606 bool sat_p
= TYPE_SATURATING (TREE_TYPE (subsubexp0
));
7608 this_optab
= zextend_p
? umsub_widen_optab
: smsub_widen_optab
;
7610 this_optab
= zextend_p
? usmsub_widen_optab
7611 : ssmsub_widen_optab
;
7612 if (mode
== GET_MODE_2XWIDER_MODE (innermode
)
7613 && (optab_handler (this_optab
, mode
)->insn_code
7614 != CODE_FOR_nothing
))
7616 expand_operands (top0
, top1
, NULL_RTX
, &op0
, &op1
,
7618 op2
= expand_expr (treeop0
, subtarget
,
7619 VOIDmode
, EXPAND_NORMAL
);
7620 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
7623 return REDUCE_BIT_FIELD (temp
);
7628 /* For initializers, we are allowed to return a MINUS of two
7629 symbolic constants. Here we handle all cases when both operands
7631 /* Handle difference of two symbolic constants,
7632 for the sake of an initializer. */
7633 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7634 && really_constant_p (treeop0
)
7635 && really_constant_p (treeop1
))
7637 expand_operands (treeop0
, treeop1
,
7638 NULL_RTX
, &op0
, &op1
, modifier
);
7640 /* If the last operand is a CONST_INT, use plus_constant of
7641 the negated constant. Else make the MINUS. */
7642 if (CONST_INT_P (op1
))
7643 return REDUCE_BIT_FIELD (plus_constant (op0
, - INTVAL (op1
)));
7645 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode
, op0
, op1
));
7648 /* No sense saving up arithmetic to be done
7649 if it's all in the wrong mode to form part of an address.
7650 And force_operand won't know whether to sign-extend or
7652 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7653 || mode
!= ptr_mode
)
7656 expand_operands (treeop0
, treeop1
,
7657 subtarget
, &op0
, &op1
, modifier
);
7659 /* Convert A - const to A + (-const). */
7660 if (CONST_INT_P (op1
))
7662 op1
= negate_rtx (mode
, op1
);
7663 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
7668 case WIDEN_MULT_EXPR
:
7669 /* If first operand is constant, swap them.
7670 Thus the following special case checks need only
7671 check the second operand. */
7672 if (TREE_CODE (treeop0
) == INTEGER_CST
)
7679 /* First, check if we have a multiplication of one signed and one
7680 unsigned operand. */
7681 if (TREE_CODE (treeop1
) != INTEGER_CST
7682 && (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
7683 != TYPE_UNSIGNED (TREE_TYPE (treeop1
))))
7685 enum machine_mode innermode
= TYPE_MODE (TREE_TYPE (treeop0
));
7686 this_optab
= usmul_widen_optab
;
7687 if (mode
== GET_MODE_2XWIDER_MODE (innermode
))
7689 if (optab_handler (this_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
7691 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
7692 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
,
7695 expand_operands (treeop0
, treeop1
, subtarget
, &op1
, &op0
,
7701 /* Check for a multiplication with matching signedness. */
7702 else if ((TREE_CODE (treeop1
) == INTEGER_CST
7703 && int_fits_type_p (treeop1
, TREE_TYPE (treeop0
)))
7704 || (TYPE_UNSIGNED (TREE_TYPE (treeop1
))
7705 == TYPE_UNSIGNED (TREE_TYPE (treeop0
))))
7707 tree op0type
= TREE_TYPE (treeop0
);
7708 enum machine_mode innermode
= TYPE_MODE (op0type
);
7709 bool zextend_p
= TYPE_UNSIGNED (op0type
);
7710 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
7711 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
7713 if (mode
== GET_MODE_2XWIDER_MODE (innermode
))
7715 if (optab_handler (this_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
7717 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
7719 temp
= expand_widening_mult (mode
, op0
, op1
, target
,
7720 unsignedp
, this_optab
);
7721 return REDUCE_BIT_FIELD (temp
);
7723 if (optab_handler (other_optab
, mode
)->insn_code
!= CODE_FOR_nothing
7724 && innermode
== word_mode
)
7727 op0
= expand_normal (treeop0
);
7728 if (TREE_CODE (treeop1
) == INTEGER_CST
)
7729 op1
= convert_modes (innermode
, mode
,
7730 expand_normal (treeop1
), unsignedp
);
7732 op1
= expand_normal (treeop1
);
7733 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7734 unsignedp
, OPTAB_LIB_WIDEN
);
7735 hipart
= gen_highpart (innermode
, temp
);
7736 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
7740 emit_move_insn (hipart
, htem
);
7741 return REDUCE_BIT_FIELD (temp
);
7745 treeop0
= fold_build1 (CONVERT_EXPR
, type
, treeop0
);
7746 treeop1
= fold_build1 (CONVERT_EXPR
, type
, treeop1
);
7747 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
7748 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
7751 /* If this is a fixed-point operation, then we cannot use the code
7752 below because "expand_mult" doesn't support sat/no-sat fixed-point
7754 if (ALL_FIXED_POINT_MODE_P (mode
))
7757 /* If first operand is constant, swap them.
7758 Thus the following special case checks need only
7759 check the second operand. */
7760 if (TREE_CODE (treeop0
) == INTEGER_CST
)
7767 /* Attempt to return something suitable for generating an
7768 indexed address, for machines that support that. */
7770 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7771 && host_integerp (treeop1
, 0))
7773 tree exp1
= treeop1
;
7775 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
7779 op0
= force_operand (op0
, NULL_RTX
);
7781 op0
= copy_to_mode_reg (mode
, op0
);
7783 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
7784 gen_int_mode (tree_low_cst (exp1
, 0),
7785 TYPE_MODE (TREE_TYPE (exp1
)))));
7788 if (modifier
== EXPAND_STACK_PARM
)
7791 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
7792 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
7794 case TRUNC_DIV_EXPR
:
7795 case FLOOR_DIV_EXPR
:
7797 case ROUND_DIV_EXPR
:
7798 case EXACT_DIV_EXPR
:
7799 /* If this is a fixed-point operation, then we cannot use the code
7800 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7802 if (ALL_FIXED_POINT_MODE_P (mode
))
7805 if (modifier
== EXPAND_STACK_PARM
)
7807 /* Possible optimization: compute the dividend with EXPAND_SUM
7808 then if the divisor is constant can optimize the case
7809 where some terms of the dividend have coeffs divisible by it. */
7810 expand_operands (treeop0
, treeop1
,
7811 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
7812 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7817 case TRUNC_MOD_EXPR
:
7818 case FLOOR_MOD_EXPR
:
7820 case ROUND_MOD_EXPR
:
7821 if (modifier
== EXPAND_STACK_PARM
)
7823 expand_operands (treeop0
, treeop1
,
7824 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
7825 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7827 case FIXED_CONVERT_EXPR
:
7828 op0
= expand_normal (treeop0
);
7829 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7830 target
= gen_reg_rtx (mode
);
7832 if ((TREE_CODE (TREE_TYPE (treeop0
)) == INTEGER_TYPE
7833 && TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
7834 || (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
)))
7835 expand_fixed_convert (target
, op0
, 1, TYPE_SATURATING (type
));
7837 expand_fixed_convert (target
, op0
, 0, TYPE_SATURATING (type
));
7840 case FIX_TRUNC_EXPR
:
7841 op0
= expand_normal (treeop0
);
7842 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7843 target
= gen_reg_rtx (mode
);
7844 expand_fix (target
, op0
, unsignedp
);
7848 op0
= expand_normal (treeop0
);
7849 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7850 target
= gen_reg_rtx (mode
);
7851 /* expand_float can't figure out what to do if FROM has VOIDmode.
7852 So give it the correct mode. With -O, cse will optimize this. */
7853 if (GET_MODE (op0
) == VOIDmode
)
7854 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0
)),
7856 expand_float (target
, op0
,
7857 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
7861 op0
= expand_expr (treeop0
, subtarget
,
7862 VOIDmode
, EXPAND_NORMAL
);
7863 if (modifier
== EXPAND_STACK_PARM
)
7865 temp
= expand_unop (mode
,
7866 optab_for_tree_code (NEGATE_EXPR
, type
,
7870 return REDUCE_BIT_FIELD (temp
);
7873 op0
= expand_expr (treeop0
, subtarget
,
7874 VOIDmode
, EXPAND_NORMAL
);
7875 if (modifier
== EXPAND_STACK_PARM
)
7878 /* ABS_EXPR is not valid for complex arguments. */
7879 gcc_assert (GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7880 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
);
7882 /* Unsigned abs is simply the operand. Testing here means we don't
7883 risk generating incorrect code below. */
7884 if (TYPE_UNSIGNED (type
))
7887 return expand_abs (mode
, op0
, target
, unsignedp
,
7888 safe_from_p (target
, treeop0
, 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
          || modifier == EXPAND_STACK_PARM
          || (MEM_P (target) && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (REG_P (target)
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
                       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */
      if (! REG_P (target))
        target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }

      /* We generate better code and avoid problems with op1 mentioning
         target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
        op1 = force_reg (mode, op1);

      {
        enum rtx_code comparison_code;
        rtx cmpop1 = op1;

        if (code == MAX_EXPR)
          comparison_code = unsignedp ? GEU : GE;
        else
          comparison_code = unsignedp ? LEU : LE;

        /* Canonicalize to comparisons against 0.  */
        if (op1 == const1_rtx)
          {
            /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
               or (a != 0 ? a : 1) for unsigned.
               For MIN we are safe converting (a <= 1 ? a : 1)
               into (a <= 0 ? a : 1)  */
            cmpop1 = const0_rtx;
            if (code == MAX_EXPR)
              comparison_code = unsignedp ? NE : GT;
          }
        if (op1 == constm1_rtx && !unsignedp)
          {
            /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
               and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
            cmpop1 = const0_rtx;
            if (code == MIN_EXPR)
              comparison_code = LT;
          }
#ifdef HAVE_conditional_move
        /* Use a conditional move if possible.  */
        if (can_conditionally_move_p (mode))
          {
            rtx insn;

            /* ??? Same problem as in expmed.c: emit_conditional_move
               forces a stack adjustment via compare_from_rtx, and we
               lose the stack adjustment if the sequence we are about
               to create is discarded.  */
            do_pending_stack_adjust ();

            start_sequence ();

            /* Try to emit the conditional move.  */
            insn = emit_conditional_move (target, comparison_code,
                                          op0, cmpop1, mode,
                                          op0, op1, mode,
                                          unsignedp);

            /* If we could do the conditional move, emit the sequence,
               and return.  */
            if (insn != NULL_RTX)
              {
                rtx seq = get_insns ();
                end_sequence ();
                emit_insn (seq);
                return target;
              }

            /* Otherwise discard the sequence and fall back to code with
               branches.  */
            end_sequence ();
          }
#endif
        if (target != op0)
          emit_move_insn (target, op0);

        temp = gen_label_rtx ();
        do_compare_rtx_and_jump (target, cmpop1, comparison_code,
                                 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
                                 -1);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
                  || (GET_MODE_PRECISION (TYPE_MODE (type))
                      == TYPE_PRECISION (type)));
      /* fall through */
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
         below because "expand_shift" doesn't support sat/no-sat fixed-point
         shifts.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
        goto binop;

      if (! safe_from_p (subtarget, treeop1, 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      temp = expand_shift (code, mode, op0, treeop1, target,
                           unsignedp);
      if (code == LSHIFT_EXPR)
        temp = REDUCE_BIT_FIELD (temp);
      return temp;
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (ops,
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                            tmode != VOIDmode ? tmode : mode);
      if (temp)
        return temp;
      /* Use a compare and a jump for BLKmode comparisons, or for function
         type comparisons if HAVE_canonicalize_funcptr_for_compare.  */

      if ((target == 0
           || modifier == EXPAND_STACK_PARM
           || ! safe_from_p (target, treeop0, 1)
           || ! safe_from_p (target, treeop1, 1)
           /* Make sure we don't have a hard reg (such as function's return
              value) live across basic blocks, if not optimizing.  */
           || (!optimize && REG_P (target)
               && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);

      emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (treeop0, target,
                         VOIDmode, EXPAND_NORMAL);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
        target = gen_reg_rtx (TYPE_MODE (type));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
    case WIDEN_SUM_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
                                            target, unsignedp);
        return target;
      }
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
        op0 = expand_normal (treeop0);
        this_optab = optab_for_tree_code (code, type, optab_default);
        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
        gcc_assert (temp);
        return temp;
      }
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
      {
        expand_operands (treeop0, treeop1,
                         NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        this_optab = optab_for_tree_code (code, type, optab_default);
        temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                             OPTAB_WIDEN);
        gcc_assert (temp);
        return temp;
      }
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      {
        expand_operands (treeop0, treeop1,
                         NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        this_optab = optab_for_tree_code (code, type, optab_default);
        temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                             OPTAB_WIDEN);
        gcc_assert (temp);
        return temp;
      }
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
        target = expand_vec_shift_expr (ops, target);
        return target;
      }
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
        op0 = expand_normal (treeop0);
        this_optab = optab_for_tree_code (code, type, optab_default);
        temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
                                          target, unsignedp);
        gcc_assert (temp);
        return temp;
      }
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
        op0 = expand_normal (treeop0);
        /* The signedness is determined from input operand.  */
        this_optab = optab_for_tree_code (code,
                                          TREE_TYPE (treeop0),
                                          optab_default);
        temp = expand_widen_pattern_expr
          (ops, op0, NULL_RTX, NULL_RTX,
           target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
        gcc_assert (temp);
        return temp;
      }
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
                                            target, unsignedp);
        gcc_assert (target);
        return target;
      }
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;
    default:
      gcc_unreachable ();
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
                   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
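/* expand_expr_real_1 below handles declarations, constants, memory
   references, calls and control constructs itself; unary and binary
   operators are handed off to expand_expr_real_2 via the default case
   at the end of its switch.  */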
rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
                    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
      default:
      case 3: treeop2 = TREE_OPERAND (exp, 2);
      case 2: treeop1 = TREE_OPERAND (exp, 1);
      case 1: treeop0 = TREE_OPERAND (exp, 0);
      case 0: break;
      }
  ignore = (target == const0_rtx
            || ((CONVERT_EXPR_CODE_P (code)
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
                      && TREE_CODE (type) == INTEGER_TYPE
                      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
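  /* For example, an operation on a 3-bit bit-field type is carried out in the
     type's mode (at least 8 bits wide), so the result has to be reduced back
     to 3 bits before it is used.  */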
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (MEM_P (temp))
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == tcc_unary
          || code == COMPONENT_REF || code == INDIRECT_REF)
        return expand_expr (treeop0, const0_rtx, VOIDmode,
                            modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
               || TREE_CODE_CLASS (code) == tcc_comparison
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
          expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if (code == BIT_FIELD_REF)
        {
          expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
          expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
          expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
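  /* ORIGINAL_TARGET is remembered separately because some cases below
     (e.g. the CONCAT handling for complex constants) want to inspect the
     target the caller passed in even after TARGET has been replaced.  */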
8370 tree function
= decl_function_context (exp
);
8372 temp
= label_rtx (exp
);
8373 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
8375 if (function
!= current_function_decl
8377 LABEL_REF_NONLOCAL_P (temp
) = 1;
8379 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
8384 /* ??? ivopts calls expander, without any preparation from
8385 out-of-ssa. So fake instructions as if this was an access to the
8386 base variable. This unnecessarily allocates a pseudo, see how we can
8387 reuse it, if partition base vars have it set already. */
8388 if (!currently_expanding_to_rtl
)
8389 return expand_expr_real_1 (SSA_NAME_VAR (exp
), target
, tmode
, modifier
, NULL
);
8391 gimple g
= get_gimple_for_ssa_name (exp
);
8393 return expand_expr_real (gimple_assign_rhs_to_tree (g
), target
,
8394 tmode
, modifier
, NULL
);
8396 decl_rtl
= get_rtx_for_ssa_name (exp
);
8397 exp
= SSA_NAME_VAR (exp
);
8398 goto expand_decl_rtl
;
8402 /* If a static var's type was incomplete when the decl was written,
8403 but the type is complete now, lay out the decl now. */
8404 if (DECL_SIZE (exp
) == 0
8405 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
8406 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
8407 layout_decl (exp
, 0);
8409 /* TLS emulation hook - replace __thread vars with
8410 *__emutls_get_address (&_emutls.var). */
8411 if (! targetm
.have_tls
8412 && TREE_CODE (exp
) == VAR_DECL
8413 && DECL_THREAD_LOCAL_P (exp
))
8415 exp
= build_fold_indirect_ref_loc (loc
, emutls_var_address (exp
));
8416 return expand_expr_real_1 (exp
, target
, tmode
, modifier
, NULL
);
8419 /* ... fall through ... */
8423 decl_rtl
= DECL_RTL (exp
);
8425 gcc_assert (decl_rtl
);
8426 decl_rtl
= copy_rtx (decl_rtl
);
8428 /* Ensure variable marked as used even if it doesn't go through
8429 a parser. If it hasn't be used yet, write out an external
8431 if (! TREE_USED (exp
))
8433 assemble_external (exp
);
8434 TREE_USED (exp
) = 1;
8437 /* Show we haven't gotten RTL for this yet. */
8440 /* Variables inherited from containing functions should have
8441 been lowered by this point. */
8442 context
= decl_function_context (exp
);
8443 gcc_assert (!context
8444 || context
== current_function_decl
8445 || TREE_STATIC (exp
)
8446 /* ??? C++ creates functions that are not TREE_STATIC. */
8447 || TREE_CODE (exp
) == FUNCTION_DECL
);
8449 /* This is the case of an array whose size is to be determined
8450 from its initializer, while the initializer is still being parsed.
8453 if (MEM_P (decl_rtl
) && REG_P (XEXP (decl_rtl
, 0)))
8454 temp
= validize_mem (decl_rtl
);
8456 /* If DECL_RTL is memory, we are in the normal case and the
8457 address is not valid, get the address into a register. */
8459 else if (MEM_P (decl_rtl
) && modifier
!= EXPAND_INITIALIZER
)
8462 *alt_rtl
= decl_rtl
;
8463 decl_rtl
= use_anchored_address (decl_rtl
);
8464 if (modifier
!= EXPAND_CONST_ADDRESS
8465 && modifier
!= EXPAND_SUM
8466 && !memory_address_addr_space_p (DECL_MODE (exp
),
8468 MEM_ADDR_SPACE (decl_rtl
)))
8469 temp
= replace_equiv_address (decl_rtl
,
8470 copy_rtx (XEXP (decl_rtl
, 0)));
8473 /* If we got something, return it. But first, set the alignment
8474 if the address is a register. */
8477 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
8478 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
8483 /* If the mode of DECL_RTL does not match that of the decl, it
8484 must be a promoted value. We return a SUBREG of the wanted mode,
8485 but mark it so that we know that it was already extended. */
8487 if (REG_P (decl_rtl
)
8488 && GET_MODE (decl_rtl
) != DECL_MODE (exp
))
8490 enum machine_mode pmode
;
8492 /* Get the signedness used for this variable. Ensure we get the
8493 same mode we got when the variable was declared. */
8494 pmode
= promote_decl_mode (exp
, &unsignedp
);
8495 gcc_assert (GET_MODE (decl_rtl
) == pmode
);
8497 temp
= gen_lowpart_SUBREG (mode
, decl_rtl
);
8498 SUBREG_PROMOTED_VAR_P (temp
) = 1;
8499 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
8506 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
8507 TREE_INT_CST_HIGH (exp
), mode
);
8513 tree tmp
= NULL_TREE
;
8514 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
8515 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
8516 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
8517 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
8518 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
8519 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
8520 return const_vector_from_tree (exp
);
8521 if (GET_MODE_CLASS (mode
) == MODE_INT
)
8523 tree type_for_mode
= lang_hooks
.types
.type_for_mode (mode
, 1);
8525 tmp
= fold_unary_loc (loc
, VIEW_CONVERT_EXPR
, type_for_mode
, exp
);
8528 tmp
= build_constructor_from_list (type
,
8529 TREE_VECTOR_CST_ELTS (exp
));
8530 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
8535 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
8538 /* If optimized, generate immediate CONST_DOUBLE
8539 which will be turned into memory by reload if necessary.
8541 We used to force a register so that loop.c could see it. But
8542 this does not allow gen_* patterns to perform optimizations with
8543 the constants. It also produces two insns in cases like "x = 1.0;".
8544 On most machines, floating-point constants are not permitted in
8545 many insns, so we'd end up copying it to a register in any case.
8547 Now, we do the copying in expand_binop, if appropriate. */
8548 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
8549 TYPE_MODE (TREE_TYPE (exp
)));
8552 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
8553 TYPE_MODE (TREE_TYPE (exp
)));
8556 /* Handle evaluating a complex constant in a CONCAT target. */
8557 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
8559 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8562 rtarg
= XEXP (original_target
, 0);
8563 itarg
= XEXP (original_target
, 1);
8565 /* Move the real and imaginary parts separately. */
8566 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
8567 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
8570 emit_move_insn (rtarg
, op0
);
8572 emit_move_insn (itarg
, op1
);
8574 return original_target
;
8577 /* ... fall through ... */
8580 temp
= expand_expr_constant (exp
, 1, modifier
);
8582 /* temp contains a constant address.
8583 On RISC machines where a constant address isn't valid,
8584 make some insns to get that address into a register. */
8585 if (modifier
!= EXPAND_CONST_ADDRESS
8586 && modifier
!= EXPAND_INITIALIZER
8587 && modifier
!= EXPAND_SUM
8588 && ! memory_address_addr_space_p (mode
, XEXP (temp
, 0),
8589 MEM_ADDR_SPACE (temp
)))
8590 return replace_equiv_address (temp
,
8591 copy_rtx (XEXP (temp
, 0)));
8597 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
);
8599 if (!SAVE_EXPR_RESOLVED_P (exp
))
8601 /* We can indeed still hit this case, typically via builtin
8602 expanders calling save_expr immediately before expanding
8603 something. Assume this means that we only have to deal
8604 with non-BLKmode values. */
8605 gcc_assert (GET_MODE (ret
) != BLKmode
);
8607 val
= build_decl (EXPR_LOCATION (exp
),
8608 VAR_DECL
, NULL
, TREE_TYPE (exp
));
8609 DECL_ARTIFICIAL (val
) = 1;
8610 DECL_IGNORED_P (val
) = 1;
8612 TREE_OPERAND (exp
, 0) = treeop0
;
8613 SAVE_EXPR_RESOLVED_P (exp
) = 1;
8615 if (!CONSTANT_P (ret
))
8616 ret
= copy_to_reg (ret
);
8617 SET_DECL_RTL (val
, ret
);
8625 /* If we don't need the result, just ensure we evaluate any
8629 unsigned HOST_WIDE_INT idx
;
8632 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
8633 expand_expr (value
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
8638 return expand_constructor (exp
, target
, modifier
, false);
8640 case MISALIGNED_INDIRECT_REF
:
8641 case ALIGN_INDIRECT_REF
:
8644 tree exp1
= treeop0
;
8645 addr_space_t as
= ADDR_SPACE_GENERIC
;
8646 enum machine_mode address_mode
= Pmode
;
8648 if (modifier
!= EXPAND_WRITE
)
8652 t
= fold_read_from_constant_string (exp
);
8654 return expand_expr (t
, target
, tmode
, modifier
);
8657 if (POINTER_TYPE_P (TREE_TYPE (exp1
)))
8659 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1
)));
8660 address_mode
= targetm
.addr_space
.address_mode (as
);
8663 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
8664 op0
= memory_address_addr_space (mode
, op0
, as
);
8666 if (code
== ALIGN_INDIRECT_REF
)
8668 int align
= TYPE_ALIGN_UNIT (type
);
8669 op0
= gen_rtx_AND (address_mode
, op0
, GEN_INT (-align
));
8670 op0
= memory_address_addr_space (mode
, op0
, as
);
8673 temp
= gen_rtx_MEM (mode
, op0
);
8675 set_mem_attributes (temp
, exp
, 0);
8676 set_mem_addr_space (temp
, as
);
8678 /* Resolve the misalignment now, so that we don't have to remember
8679 to resolve it later. Of course, this only works for reads. */
8680 if (code
== MISALIGNED_INDIRECT_REF
)
8685 gcc_assert (modifier
== EXPAND_NORMAL
8686 || modifier
== EXPAND_STACK_PARM
);
8688 /* The vectorizer should have already checked the mode. */
8689 icode
= optab_handler (movmisalign_optab
, mode
)->insn_code
;
8690 gcc_assert (icode
!= CODE_FOR_nothing
);
8692 /* We've already validated the memory, and we're creating a
8693 new pseudo destination. The predicates really can't fail. */
8694 reg
= gen_reg_rtx (mode
);
8696 /* Nor can the insn generator. */
8697 insn
= GEN_FCN (icode
) (reg
, temp
);
8706 case TARGET_MEM_REF
:
8708 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (exp
));
8709 struct mem_address addr
;
8712 get_address_description (exp
, &addr
);
8713 op0
= addr_for_mem_ref (&addr
, as
, true);
8714 op0
= memory_address_addr_space (mode
, op0
, as
);
8715 temp
= gen_rtx_MEM (mode
, op0
);
8716 set_mem_attributes (temp
, TMR_ORIGINAL (exp
), 0);
8717 set_mem_addr_space (temp
, as
);
8718 base
= get_base_address (TMR_ORIGINAL (exp
));
8719 if (INDIRECT_REF_P (base
)
8721 && TREE_CODE (TMR_BASE (exp
)) == SSA_NAME
8722 && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp
))))
8724 set_mem_expr (temp
, build1 (INDIRECT_REF
,
8725 TREE_TYPE (exp
), TMR_BASE (exp
)));
8726 set_mem_offset (temp
, NULL_RTX
);
8734 tree array
= treeop0
;
8735 tree index
= treeop1
;
8737 /* Fold an expression like: "foo"[2].
8738 This is not done in fold so it won't happen inside &.
8739 Don't fold if this is for wide characters since it's too
8740 difficult to do correctly and this is a very rare case. */
8742 if (modifier
!= EXPAND_CONST_ADDRESS
8743 && modifier
!= EXPAND_INITIALIZER
8744 && modifier
!= EXPAND_MEMORY
)
8746 tree t
= fold_read_from_constant_string (exp
);
8749 return expand_expr (t
, target
, tmode
, modifier
);
8752 /* If this is a constant index into a constant array,
8753 just get the value from the array. Handle both the cases when
8754 we have an explicit constructor and when our operand is a variable
8755 that was declared const. */
8757 if (modifier
!= EXPAND_CONST_ADDRESS
8758 && modifier
!= EXPAND_INITIALIZER
8759 && modifier
!= EXPAND_MEMORY
8760 && TREE_CODE (array
) == CONSTRUCTOR
8761 && ! TREE_SIDE_EFFECTS (array
)
8762 && TREE_CODE (index
) == INTEGER_CST
)
8764 unsigned HOST_WIDE_INT ix
;
8767 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
8769 if (tree_int_cst_equal (field
, index
))
8771 if (!TREE_SIDE_EFFECTS (value
))
8772 return expand_expr (fold (value
), target
, tmode
, modifier
);
8777 else if (optimize
>= 1
8778 && modifier
!= EXPAND_CONST_ADDRESS
8779 && modifier
!= EXPAND_INITIALIZER
8780 && modifier
!= EXPAND_MEMORY
8781 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
8782 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
8783 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
8784 && targetm
.binds_local_p (array
))
8786 if (TREE_CODE (index
) == INTEGER_CST
)
8788 tree init
= DECL_INITIAL (array
);
8790 if (TREE_CODE (init
) == CONSTRUCTOR
)
8792 unsigned HOST_WIDE_INT ix
;
8795 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
8797 if (tree_int_cst_equal (field
, index
))
8799 if (TREE_SIDE_EFFECTS (value
))
8802 if (TREE_CODE (value
) == CONSTRUCTOR
)
8804 /* If VALUE is a CONSTRUCTOR, this
8805 optimization is only useful if
8806 this doesn't store the CONSTRUCTOR
8807 into memory. If it does, it is more
8808 efficient to just load the data from
8809 the array directly. */
8810 rtx ret
= expand_constructor (value
, target
,
8812 if (ret
== NULL_RTX
)
8816 return expand_expr (fold (value
), target
, tmode
,
8820 else if(TREE_CODE (init
) == STRING_CST
)
8822 tree index1
= index
;
8823 tree low_bound
= array_ref_low_bound (exp
);
8824 index1
= fold_convert_loc (loc
, sizetype
,
8827 /* Optimize the special-case of a zero lower bound.
8829 We convert the low_bound to sizetype to avoid some problems
8830 with constant folding. (E.g. suppose the lower bound is 1,
8831 and its mode is QI. Without the conversion,l (ARRAY
8832 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8833 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
8835 if (! integer_zerop (low_bound
))
8836 index1
= size_diffop_loc (loc
, index1
,
8837 fold_convert_loc (loc
, sizetype
,
8840 if (0 > compare_tree_int (index1
,
8841 TREE_STRING_LENGTH (init
)))
8843 tree type
= TREE_TYPE (TREE_TYPE (init
));
8844 enum machine_mode mode
= TYPE_MODE (type
);
8846 if (GET_MODE_CLASS (mode
) == MODE_INT
8847 && GET_MODE_SIZE (mode
) == 1)
8848 return gen_int_mode (TREE_STRING_POINTER (init
)
8849 [TREE_INT_CST_LOW (index1
)],
8856 goto normal_inner_ref
;
8859 /* If the operand is a CONSTRUCTOR, we can just extract the
8860 appropriate field if it is present. */
8861 if (TREE_CODE (treeop0
) == CONSTRUCTOR
)
8863 unsigned HOST_WIDE_INT idx
;
8866 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0
),
8868 if (field
== treeop1
8869 /* We can normally use the value of the field in the
8870 CONSTRUCTOR. However, if this is a bitfield in
8871 an integral mode that we can fit in a HOST_WIDE_INT,
8872 we must mask only the number of bits in the bitfield,
8873 since this is done implicitly by the constructor. If
8874 the bitfield does not meet either of those conditions,
8875 we can't do this optimization. */
8876 && (! DECL_BIT_FIELD (field
)
8877 || ((GET_MODE_CLASS (DECL_MODE (field
)) == MODE_INT
)
8878 && (GET_MODE_BITSIZE (DECL_MODE (field
))
8879 <= HOST_BITS_PER_WIDE_INT
))))
8881 if (DECL_BIT_FIELD (field
)
8882 && modifier
== EXPAND_STACK_PARM
)
8884 op0
= expand_expr (value
, target
, tmode
, modifier
);
8885 if (DECL_BIT_FIELD (field
))
8887 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
8888 enum machine_mode imode
= TYPE_MODE (TREE_TYPE (field
));
8890 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
8892 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
8893 op0
= expand_and (imode
, op0
, op1
, target
);
8898 = build_int_cst (NULL_TREE
,
8899 GET_MODE_BITSIZE (imode
) - bitsize
);
8901 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
8903 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
8911 goto normal_inner_ref
;
8914 case ARRAY_RANGE_REF
:
8917 enum machine_mode mode1
, mode2
;
8918 HOST_WIDE_INT bitsize
, bitpos
;
8920 int volatilep
= 0, must_force_mem
;
8921 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
8922 &mode1
, &unsignedp
, &volatilep
, true);
8923 rtx orig_op0
, memloc
;
8925 /* If we got back the original object, something is wrong. Perhaps
8926 we are evaluating an expression too early. In any event, don't
8927 infinitely recurse. */
8928 gcc_assert (tem
!= exp
);
8930 /* If TEM's type is a union of variable size, pass TARGET to the inner
8931 computation, since it will need a temporary and TARGET is known
8932 to have to do. This occurs in unchecked conversion in Ada. */
8935 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
8936 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
8938 && modifier
!= EXPAND_STACK_PARM
8939 ? target
: NULL_RTX
),
8941 (modifier
== EXPAND_INITIALIZER
8942 || modifier
== EXPAND_CONST_ADDRESS
8943 || modifier
== EXPAND_STACK_PARM
)
8944 ? modifier
: EXPAND_NORMAL
);
8947 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
8949 /* If we have either an offset, a BLKmode result, or a reference
8950 outside the underlying object, we must force it to memory.
8951 Such a case can occur in Ada if we have unchecked conversion
8952 of an expression from a scalar type to an aggregate type or
8953 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
8954 passed a partially uninitialized object or a view-conversion
8955 to a larger size. */
8956 must_force_mem
= (offset
8958 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));
8960 /* Handle CONCAT first. */
8961 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
8964 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)))
8967 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
8970 op0
= XEXP (op0
, 0);
8971 mode2
= GET_MODE (op0
);
8973 else if (bitpos
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
8974 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1)))
8978 op0
= XEXP (op0
, 1);
8980 mode2
= GET_MODE (op0
);
8983 /* Otherwise force into memory. */
8987 /* If this is a constant, put it in a register if it is a legitimate
8988 constant and we don't need a memory reference. */
8989 if (CONSTANT_P (op0
)
8991 && LEGITIMATE_CONSTANT_P (op0
)
8993 op0
= force_reg (mode2
, op0
);
8995 /* Otherwise, if this is a constant, try to force it to the constant
8996 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
8997 is a legitimate constant. */
8998 else if (CONSTANT_P (op0
) && (memloc
= force_const_mem (mode2
, op0
)))
8999 op0
= validize_mem (memloc
);
9001 /* Otherwise, if this is a constant or the object is not in memory
9002 and need be, put it there. */
9003 else if (CONSTANT_P (op0
) || (!MEM_P (op0
) && must_force_mem
))
9005 tree nt
= build_qualified_type (TREE_TYPE (tem
),
9006 (TYPE_QUALS (TREE_TYPE (tem
))
9007 | TYPE_QUAL_CONST
));
9008 memloc
= assign_temp (nt
, 1, 1, 1);
9009 emit_move_insn (memloc
, op0
);
9015 enum machine_mode address_mode
;
9016 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
9019 gcc_assert (MEM_P (op0
));
9022 = targetm
.addr_space
.address_mode (MEM_ADDR_SPACE (op0
));
9023 if (GET_MODE (offset_rtx
) != address_mode
)
9024 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
9026 if (GET_MODE (op0
) == BLKmode
9027 /* A constant address in OP0 can have VOIDmode, we must
9028 not try to call force_reg in that case. */
9029 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
9031 && (bitpos
% bitsize
) == 0
9032 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
9033 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
9035 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
9039 op0
= offset_address (op0
, offset_rtx
,
9040 highest_pow2_factor (offset
));
9043 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9044 record its alignment as BIGGEST_ALIGNMENT. */
9045 if (MEM_P (op0
) && bitpos
== 0 && offset
!= 0
9046 && is_aligning_offset (offset
, tem
))
9047 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
9049 /* Don't forget about volatility even if this is a bitfield. */
9050 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
9052 if (op0
== orig_op0
)
9053 op0
= copy_rtx (op0
);
9055 MEM_VOLATILE_P (op0
) = 1;
9058 /* In cases where an aligned union has an unaligned object
9059 as a field, we might be extracting a BLKmode value from
9060 an integer-mode (e.g., SImode) object. Handle this case
9061 by doing the extract into an object as wide as the field
9062 (which we know to be the width of a basic mode), then
9063 storing into memory, and changing the mode to BLKmode. */
9064 if (mode1
== VOIDmode
9065 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
9066 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
9067 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
9068 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
9069 && modifier
!= EXPAND_CONST_ADDRESS
9070 && modifier
!= EXPAND_INITIALIZER
)
9071 /* If the field isn't aligned enough to fetch as a memref,
9072 fetch it as a bit field. */
9073 || (mode1
!= BLKmode
9074 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
9075 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
9077 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
9078 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
9079 && ((modifier
== EXPAND_CONST_ADDRESS
9080 || modifier
== EXPAND_INITIALIZER
)
9082 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
9083 || (bitpos
% BITS_PER_UNIT
!= 0)))
9084 /* If the type and the field are a constant size and the
9085 size of the type isn't the same size as the bitfield,
9086 we must use bitfield operations. */
9088 && TYPE_SIZE (TREE_TYPE (exp
))
9089 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
9090 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
9093 enum machine_mode ext_mode
= mode
;
9095 if (ext_mode
== BLKmode
9096 && ! (target
!= 0 && MEM_P (op0
)
9098 && bitpos
% BITS_PER_UNIT
== 0))
9099 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
9101 if (ext_mode
== BLKmode
)
9104 target
= assign_temp (type
, 0, 1, 1);
9109 /* In this case, BITPOS must start at a byte boundary and
9110 TARGET, if specified, must be a MEM. */
9111 gcc_assert (MEM_P (op0
)
9112 && (!target
|| MEM_P (target
))
9113 && !(bitpos
% BITS_PER_UNIT
));
9115 emit_block_move (target
,
9116 adjust_address (op0
, VOIDmode
,
9117 bitpos
/ BITS_PER_UNIT
),
9118 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
9120 (modifier
== EXPAND_STACK_PARM
9121 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
9126 op0
= validize_mem (op0
);
9128 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
9129 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
9131 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
9132 (modifier
== EXPAND_STACK_PARM
9133 ? NULL_RTX
: target
),
9134 ext_mode
, ext_mode
);
9136 /* If the result is a record type and BITSIZE is narrower than
9137 the mode of OP0, an integral mode, and this is a big endian
9138 machine, we must put the field into the high-order bits. */
9139 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
9140 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
9141 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
9142 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
9143 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
9147 /* If the result type is BLKmode, store the data into a temporary
9148 of the appropriate type, but with the mode corresponding to the
9149 mode for the data we have (op0's mode). It's tempting to make
9150 this a constant type, since we know it's only being stored once,
9151 but that can cause problems if we are taking the address of this
9152 COMPONENT_REF because the MEM of any reference via that address
9153 will have flags corresponding to the type, which will not
9154 necessarily be constant. */
9155 if (mode
== BLKmode
)
9157 HOST_WIDE_INT size
= GET_MODE_BITSIZE (ext_mode
);
9160 /* If the reference doesn't use the alias set of its type,
9161 we cannot create the temporary using that type. */
9162 if (component_uses_parent_alias_set (exp
))
9164 new_rtx
= assign_stack_local (ext_mode
, size
, 0);
9165 set_mem_alias_set (new_rtx
, get_alias_set (exp
));
9168 new_rtx
= assign_stack_temp_for_type (ext_mode
, size
, 0, type
);
9170 emit_move_insn (new_rtx
, op0
);
9171 op0
= copy_rtx (new_rtx
);
9172 PUT_MODE (op0
, BLKmode
);
9173 set_mem_attributes (op0
, exp
, 1);
9179 /* If the result is BLKmode, use that to access the object
9181 if (mode
== BLKmode
)
9184 /* Get a reference to just this component. */
9185 if (modifier
== EXPAND_CONST_ADDRESS
9186 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
9187 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
9189 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
9191 if (op0
== orig_op0
)
9192 op0
= copy_rtx (op0
);
9194 set_mem_attributes (op0
, exp
, 0);
9195 if (REG_P (XEXP (op0
, 0)))
9196 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
9198 MEM_VOLATILE_P (op0
) |= volatilep
;
9199 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
9200 || modifier
== EXPAND_CONST_ADDRESS
9201 || modifier
== EXPAND_INITIALIZER
)
9203 else if (target
== 0)
9204 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
9206 convert_move (target
, op0
, unsignedp
);
9211 return expand_expr (OBJ_TYPE_REF_EXPR (exp
), target
, tmode
, modifier
);
9214 /* All valid uses of __builtin_va_arg_pack () are removed during
9216 if (CALL_EXPR_VA_ARG_PACK (exp
))
9217 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
9219 tree fndecl
= get_callee_fndecl (exp
), attr
;
9222 && (attr
= lookup_attribute ("error",
9223 DECL_ATTRIBUTES (fndecl
))) != NULL
)
9224 error ("%Kcall to %qs declared with attribute error: %s",
9225 exp
, identifier_to_locale (lang_hooks
.decl_printable_name (fndecl
, 1)),
9226 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
9228 && (attr
= lookup_attribute ("warning",
9229 DECL_ATTRIBUTES (fndecl
))) != NULL
)
9230 warning_at (tree_nonartificial_location (exp
),
9231 0, "%Kcall to %qs declared with attribute warning: %s",
9232 exp
, identifier_to_locale (lang_hooks
.decl_printable_name (fndecl
, 1)),
9233 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
9235 /* Check for a built-in function. */
9236 if (fndecl
&& DECL_BUILT_IN (fndecl
))
9238 gcc_assert (DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_FRONTEND
);
9239 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
9242 return expand_call (exp
, target
, ignore
);
9244 case VIEW_CONVERT_EXPR
:
9247 /* If we are converting to BLKmode, try to avoid an intermediate
9248 temporary by fetching an inner memory reference. */
9250 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
9251 && TYPE_MODE (TREE_TYPE (treeop0
)) != BLKmode
9252 && handled_component_p (treeop0
))
9254 enum machine_mode mode1
;
9255 HOST_WIDE_INT bitsize
, bitpos
;
9260 = get_inner_reference (treeop0
, &bitsize
, &bitpos
,
9261 &offset
, &mode1
, &unsignedp
, &volatilep
,
9265 /* ??? We should work harder and deal with non-zero offsets. */
9267 && (bitpos
% BITS_PER_UNIT
) == 0
9269 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) == 0)
9271 /* See the normal_inner_ref case for the rationale. */
9274 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
9275 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
9277 && modifier
!= EXPAND_STACK_PARM
9278 ? target
: NULL_RTX
),
9280 (modifier
== EXPAND_INITIALIZER
9281 || modifier
== EXPAND_CONST_ADDRESS
9282 || modifier
== EXPAND_STACK_PARM
)
9283 ? modifier
: EXPAND_NORMAL
);
9285 if (MEM_P (orig_op0
))
9289 /* Get a reference to just this component. */
9290 if (modifier
== EXPAND_CONST_ADDRESS
9291 || modifier
== EXPAND_SUM
9292 || modifier
== EXPAND_INITIALIZER
)
9293 op0
= adjust_address_nv (op0
, mode
, bitpos
/ BITS_PER_UNIT
);
9295 op0
= adjust_address (op0
, mode
, bitpos
/ BITS_PER_UNIT
);
9297 if (op0
== orig_op0
)
9298 op0
= copy_rtx (op0
);
9300 set_mem_attributes (op0
, treeop0
, 0);
9301 if (REG_P (XEXP (op0
, 0)))
9302 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
9304 MEM_VOLATILE_P (op0
) |= volatilep
;
9310 op0
= expand_expr (treeop0
,
9311 NULL_RTX
, VOIDmode
, modifier
);
9313 /* If the input and output modes are both the same, we are done. */
9314 if (mode
== GET_MODE (op0
))
9316 /* If neither mode is BLKmode, and both modes are the same size
9317 then we can use gen_lowpart. */
9318 else if (mode
!= BLKmode
&& GET_MODE (op0
) != BLKmode
9319 && GET_MODE_SIZE (mode
) == GET_MODE_SIZE (GET_MODE (op0
))
9320 && !COMPLEX_MODE_P (GET_MODE (op0
)))
9322 if (GET_CODE (op0
) == SUBREG
)
9323 op0
= force_reg (GET_MODE (op0
), op0
);
9324 op0
= gen_lowpart (mode
, op0
);
9326 /* If both types are integral, convert from one mode to the other. */
9327 else if (INTEGRAL_TYPE_P (type
) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0
)))
9328 op0
= convert_modes (mode
, GET_MODE (op0
), op0
,
9329 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
9330 /* As a last resort, spill op0 to memory, and reload it in a
9332 else if (!MEM_P (op0
))
9334 /* If the operand is not a MEM, force it into memory. Since we
9335 are going to be changing the mode of the MEM, don't call
9336 force_const_mem for constants because we don't allow pool
9337 constants to change mode. */
9338 tree inner_type
= TREE_TYPE (treeop0
);
9340 gcc_assert (!TREE_ADDRESSABLE (exp
));
9342 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
9344 = assign_stack_temp_for_type
9345 (TYPE_MODE (inner_type
),
9346 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
9348 emit_move_insn (target
, op0
);
9352 /* At this point, OP0 is in the correct mode. If the output type is
9353 such that the operand is known to be aligned, indicate that it is.
9354 Otherwise, we need only be concerned about alignment for non-BLKmode
9358 op0
= copy_rtx (op0
);
9360 if (TYPE_ALIGN_OK (type
))
9361 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
9362 else if (STRICT_ALIGNMENT
9364 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode
))
9366 tree inner_type
= TREE_TYPE (treeop0
);
9367 HOST_WIDE_INT temp_size
9368 = MAX (int_size_in_bytes (inner_type
),
9369 (HOST_WIDE_INT
) GET_MODE_SIZE (mode
));
9371 = assign_stack_temp_for_type (mode
, temp_size
, 0, type
);
9372 rtx new_with_op0_mode
9373 = adjust_address (new_rtx
, GET_MODE (op0
), 0);
9375 gcc_assert (!TREE_ADDRESSABLE (exp
));
9377 if (GET_MODE (op0
) == BLKmode
)
9378 emit_block_move (new_with_op0_mode
, op0
,
9379 GEN_INT (GET_MODE_SIZE (mode
)),
9380 (modifier
== EXPAND_STACK_PARM
9381 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
9383 emit_move_insn (new_with_op0_mode
, op0
);
9388 op0
= adjust_address (op0
, mode
, 0);
9393 /* Use a compare and a jump for BLKmode comparisons, or for function
9394 type comparisons is HAVE_canonicalize_funcptr_for_compare. */
9396 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9397 are occassionally created by folding during expansion. */
9398 case TRUTH_ANDIF_EXPR
:
9399 case TRUTH_ORIF_EXPR
:
9402 || modifier
== EXPAND_STACK_PARM
9403 || ! safe_from_p (target
, treeop0
, 1)
9404 || ! safe_from_p (target
, treeop1
, 1)
9405 /* Make sure we don't have a hard reg (such as function's return
9406 value) live across basic blocks, if not optimizing. */
9407 || (!optimize
&& REG_P (target
)
9408 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
9409 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
9412 emit_move_insn (target
, const0_rtx
);
9414 op1
= gen_label_rtx ();
9415 jumpifnot_1 (code
, treeop0
, treeop1
, op1
, -1);
9418 emit_move_insn (target
, const1_rtx
);
9421 return ignore
? const0_rtx
: target
;
9423 case STATEMENT_LIST
:
9425 tree_stmt_iterator iter
;
9427 gcc_assert (ignore
);
9429 for (iter
= tsi_start (exp
); !tsi_end_p (iter
); tsi_next (&iter
))
9430 expand_expr (tsi_stmt (iter
), const0_rtx
, VOIDmode
, modifier
);
9435 /* A COND_EXPR with its type being VOID_TYPE represents a
9436 conditional jump and is handled in
9437 expand_gimple_cond_expr. */
9438 gcc_assert (!VOID_TYPE_P (type
));
9440 /* Note that COND_EXPRs whose type is a structure or union
9441 are required to be constructed to contain assignments of
9442 a temporary variable, so that we can evaluate them here
9443 for side effect only. If type is void, we must do likewise. */
9445 gcc_assert (!TREE_ADDRESSABLE (type
)
9447 && TREE_TYPE (treeop1
) != void_type_node
9448 && TREE_TYPE (treeop2
) != void_type_node
);
9450 /* If we are not to produce a result, we have no target. Otherwise,
9451 if a target was specified use it; it will not be used as an
9452 intermediate target unless it is safe. If no target, use a
9455 if (modifier
!= EXPAND_STACK_PARM
9457 && safe_from_p (original_target
, treeop0
, 1)
9458 && GET_MODE (original_target
) == mode
9459 #ifdef HAVE_conditional_move
9460 && (! can_conditionally_move_p (mode
)
9461 || REG_P (original_target
))
9463 && !MEM_P (original_target
))
9464 temp
= original_target
;
9466 temp
= assign_temp (type
, 0, 0, 1);
9468 do_pending_stack_adjust ();
9470 op0
= gen_label_rtx ();
9471 op1
= gen_label_rtx ();
9472 jumpifnot (treeop0
, op0
, -1);
9473 store_expr (treeop1
, temp
,
9474 modifier
== EXPAND_STACK_PARM
,
9477 emit_jump_insn (gen_jump (op1
));
9480 store_expr (treeop2
, temp
,
9481 modifier
== EXPAND_STACK_PARM
,
9489 target
= expand_vec_cond_expr (type
, treeop0
, treeop1
, treeop2
, target
);
9496 gcc_assert (ignore
);
9498 /* Check for |= or &= of a bitfield of size one into another bitfield
9499 of size 1. In this case, (unless we need the result of the
9500 assignment) we can do this more efficiently with a
9501 test followed by an assignment, if necessary.
9503 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9504 things change so we do, this code should be enhanced to
9506 if (TREE_CODE (lhs
) == COMPONENT_REF
9507 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
9508 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
9509 && TREE_OPERAND (rhs
, 0) == lhs
9510 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
9511 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
9512 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
9514 rtx label
= gen_label_rtx ();
9515 int value
= TREE_CODE (rhs
) == BIT_IOR_EXPR
;
9516 do_jump (TREE_OPERAND (rhs
, 1),
9518 value
? 0 : label
, -1);
9519 expand_assignment (lhs
, build_int_cst (TREE_TYPE (rhs
), value
),
9520 MOVE_NONTEMPORAL (exp
));
9521 do_pending_stack_adjust ();
9526 expand_assignment (lhs
, rhs
, MOVE_NONTEMPORAL (exp
));
9531 return expand_expr_addr_expr (exp
, target
, tmode
, modifier
);
9534 op0
= expand_normal (treeop0
);
9535 return read_complex_part (op0
, false);
9538 op0
= expand_normal (treeop0
);
9539 return read_complex_part (op0
, true);
9546 /* Expanded in cfgexpand.c. */
9549 case TRY_CATCH_EXPR
:
9551 case EH_FILTER_EXPR
:
9552 case TRY_FINALLY_EXPR
:
9553 /* Lowered by tree-eh.c. */
9556 case WITH_CLEANUP_EXPR
:
9557 case CLEANUP_POINT_EXPR
:
9559 case CASE_LABEL_EXPR
:
9565 case PREINCREMENT_EXPR
:
9566 case PREDECREMENT_EXPR
:
9567 case POSTINCREMENT_EXPR
:
9568 case POSTDECREMENT_EXPR
:
9571 /* Lowered by gimplify.c. */
9575 /* Function descriptors are not valid except for as
9576 initialization constants, and should not be expanded. */
9579 case WITH_SIZE_EXPR
:
9580 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9581 have pulled out the size to use in whatever context it needed. */
9582 return expand_expr_real (treeop0
, original_target
, tmode
,
9585 case REALIGN_LOAD_EXPR
:
9587 tree oprnd0
= treeop0
;
9588 tree oprnd1
= treeop1
;
9589 tree oprnd2
= treeop2
;
9592 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9593 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9594 op2
= expand_normal (oprnd2
);
9595 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
9603 tree oprnd0
= treeop0
;
9604 tree oprnd1
= treeop1
;
9605 tree oprnd2
= treeop2
;
9608 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9609 op2
= expand_normal (oprnd2
);
9610 target
= expand_widen_pattern_expr (&ops
, op0
, op1
, op2
,
9615 case COMPOUND_LITERAL_EXPR
:
9617 /* Initialize the anonymous variable declared in the compound
9618 literal, then return the variable. */
9619 tree decl
= COMPOUND_LITERAL_EXPR_DECL (exp
);
9621 /* Create RTL for this variable. */
9622 if (!DECL_RTL_SET_P (decl
))
9624 if (DECL_HARD_REGISTER (decl
))
9625 /* The user specified an assembler name for this variable.
9627 rest_of_decl_compilation (decl
, 0, 0);
9632 return expand_expr_real (decl
, original_target
, tmode
,
9637 return expand_expr_real_2 (&ops
, target
, tmode
, modifier
);
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask = immed_double_int_const (double_int_mask (prec),
                                         GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
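  /* For signed types, sign-extend instead of masking: e.g. with PREC == 3 in
     SImode the value is shifted left by 29 and arithmetically shifted back
     right by 29, replicating bit 2 into the upper bits.  */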
  else
    {
      tree count = build_int_cst (NULL_TREE,
                                  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;
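  /* At this point OFFSET has the form (X & C) where C + 1 is a power of two
     and C is larger than BIGGEST_ALIGNMENT in bytes; it remains to check
     that X is the negated address of EXP.  */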
  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
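/* For example, for ARG == &"hello"[2] (or equivalently &"hello" + 2), the
   STRING_CST for "hello" is returned and *PTR_OFFSET is set to 2.  */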
tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
        {
          *ptr_offset = size_zero_node;
          return TREE_OPERAND (arg, 0);
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
        {
          array = TREE_OPERAND (arg, 0);
          offset = size_zero_node;
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
            return 0;

          /* Check if the array has a nonzero lower bound.  */
          lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
          if (!integer_zerop (lower_bound))
            {
              /* If the offset and base aren't both constants, return 0.  */
              if (TREE_CODE (lower_bound) != INTEGER_CST)
                return 0;
              if (TREE_CODE (offset) != INTEGER_CST)
                return 0;
              /* Adjust offset by the lower bound.  */
              offset = size_diffop (fold_convert (sizetype, offset),
                                    fold_convert (sizetype, lower_bound));
            }
        }
      else
        return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg0, 0);
          offset = arg1;
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg1, 0);
          offset = arg0;
        }
      else
        return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
          || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
        return 0;

      /* They must be read-only, non-volatile and bind locally.  */
      if (! TREE_READONLY (array)
          || TREE_SIDE_EFFECTS (array)
          || ! targetm.binds_local_p (array))
        return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
          || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
        return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
         and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
          && (! host_integerp (offset, 1)
              || compare_tree_int (offset, length) >= 0))
        return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
/* Generate code to calculate OPS, an exploded comparison expression,
   using a store-flag instruction and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (loc,
                                                code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
                                operand_mode, unsignedp, 1);
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
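/* When the target provides no casesi pattern, HAVE_casesi is 0 and the dummy
   definitions above only exist to keep the code below compiling; try_casesi
   then simply reports failure.  */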
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
            rtx fallback_label ATTRIBUTE_UNUSED)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                 omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_size (index_bits, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, !default_label
                              ? fallback_label : default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
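  /* Informally: for a switch covering case values LOW .. HIGH the caller
     has already computed INDEX = orig_index - LOW and RANGE = HIGH - LOW,
     so a single unsigned test "INDEX > RANGE" rejects both orig_index < LOW
     (which wrapped around to a huge unsigned value) and orig_index > HIGH.  */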

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class mclass = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (mclass != MODE_VECTOR_INT
      && mclass != MODE_VECTOR_FLOAT
      && mclass != MODE_VECTOR_FRACT
      && mclass != MODE_VECTOR_UFRACT
      && mclass != MODE_VECTOR_ACCUM
      && mclass != MODE_VECTOR_UACCUM)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
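/* For example (informally): a V4SImode VECTOR_CST with elements {1, 2, 3, 4}
   becomes (const_vector:V4SI [1 2 3 4]); any trailing elements the tree does
   not supply are filled in as zero below.  */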
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
                                                   inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

/* Build a decl for an EH personality function named NAME.  */
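/* For orientation (an informal note, not asserted by this file): the
   prototype constructed below is the usual Itanium-ABI style personality
   routine signature, e.g. the one used by the C++ runtime's
   __gxx_personality_v0.  */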
tree
build_personality_function (const char *name)
{
  tree decl, type;

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"