/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "diagnostic.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
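
/* Illustrative note (not from the original sources): on a typical
   target where the stack grows downward but the argument area grows
   upward, exactly one of STACK_GROWS_DOWNWARD and ARGS_GROW_DOWNWARD
   is defined, the "defined () != defined ()" test above is true, and
   PUSH_ARGS_REVERSED makes argument pushes run from last to first.  */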
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int volatile_ok;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int, unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
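
/* Worked example (illustrative, not from the original sources): with
   hypothetical values MOVE_MAX_PIECES == 8 and MOVE_RATIO == 5, a
   16-byte copy of 8-byte-aligned data costs two DImode moves, so
   MOVE_BY_PIECES_P (16, 64) is (2 < 5), i.e. true, and the copy is
   expanded inline; a 64-byte copy would need eight moves and would
   fall back to a movmem pattern or a libcall instead.  */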
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat, mem, mem1;
  rtx reg;
  int num_clobbers;
  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
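
/* Illustrative usage sketch (assumed caller, not from the original
   sources): expansion code typically widens a value like this,
   zero-extending a QImode pseudo into a fresh SImode pseudo:

     rtx byte = gen_reg_rtx (QImode);
     rtx word = convert_to_mode (SImode, byte, 1);

   where the final 1 is UNSIGNEDP, requesting zero-extension.
   convert_to_mode merely defers to convert_modes with OLDMODE ==
   VOIDmode, letting GET_MODE (X) supply the source mode.  */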
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
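
/* Worked example of the masking arithmetic above (illustrative, not
   from the original sources).  Sign-extending the 8-bit constant 0xF0
   taken from a QImode value, so width == 8:

     val  = 0xF0;
     val &= ((HOST_WIDE_INT) 1 << 8) - 1;    leaves val == 0xF0
     val & ((HOST_WIDE_INT) 1 << 7)          the sign bit is set, so
     val |= (HOST_WIDE_INT) (-1) << 8;       gives val == -16 (...FFF0)

   and gen_int_mode then receives the properly sign-extended value.  */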
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
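
/* Illustrative note (not from the original sources): on a host with a
   64-bit HOST_WIDE_INT, 2 * sizeof (HOST_WIDE_INT) is 16 bytes, so
   STORE_MAX_PIECES equals MOVE_MAX_PIECES on any target whose widest
   piece is 16 bytes or narrower.  */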
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
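
#if 0
/* Compile-time-excluded sketch (illustrative, not from the original
   sources): at run time the insn sequence emitted by move_by_pieces
   behaves like this plain-C loop, draining the copy with the widest
   chunk that fits before falling to successively smaller ones.  */
static void
move_by_pieces_sketch (char *to, const char *from, size_t len)
{
  size_t chunk;

  for (chunk = sizeof (long); chunk > 0; chunk /= 2)
    while (len >= chunk)
      {
	memcpy (to, from, chunk);	/* stands in for one move insn */
	to += chunk;
	from += chunk;
	len -= chunk;
      }
}
#endif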
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
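
/* Worked example (illustrative, not from the original sources): with
   MOVE_MAX_PIECES == 8, adequate alignment, and l == 11, the loop
   above counts 11/8 = 1 eight-byte move (3 bytes left), then 3/4 = 0,
   3/2 = 1 (1 byte left), and 1/1 = 1, for a total of 3 insns.  */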
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
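
#if 0
/* Compile-time-excluded usage sketch (illustrative, not from the
   original sources): a caller expanding an aggregate assignment might
   copy SIZE bytes between two BLKmode MEMs like so, letting
   emit_block_move choose between by-pieces moves, a movmem pattern,
   and a memcpy libcall.  */
static void
copy_aggregate_sketch (rtx dst_mem, rtx src_mem, HOST_WIDE_INT size)
{
  emit_block_move (dst_mem, src_mem, GEN_INT (size), BLOCK_OP_NORMAL);
}
#endif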
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align),
					GEN_INT (expected_size));

	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
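
/* Illustrative note (not from the original sources): the RTL emitted
   above behaves like the following byte loop, with the comparison at
   the bottom so the body is skipped entirely when SIZE is zero:

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter = iter + 1;
   cmp:
     if (iter < size)
       goto top;
*/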
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
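
/* Illustrative note (not from the original sources): a group such as
   a two-register return value is represented as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DI 1) (const_int 8))])

   where each constant is the byte offset of the register's data in
   the containing value; gen_group_rtx clones this shape with fresh
   pseudos of the same modes.  */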
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
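
/* A minimal usage sketch (illustrative only): a caller that receives a
   struct in a PARALLEL typically spills it with

     emit_group_store (dst_mem, src_parallel, type, ssize);

   while the inverse direction (filling a PARALLEL from a block) goes
   through emit_group_load.  The offsets recorded in the PARALLEL are
   always relative to the start of the destination block.  */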
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode copy_mode;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (tgtblk))
    {
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
        copy_mode = mem_mode;
    }

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, copy_mode, copy_mode));
    }

  return tgtblk;
}
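
/* Worked example (illustrative): for a 6-byte struct returned in 64-bit
   registers on a big-endian target that does not return in the MSB,
   bytes % UNITS_PER_WORD == 6, so padding_correction
   = 64 - 6 * 8 = 16 bits, and the loop above skips the two most
   significant pad bytes of the register while filling TGTBLK from its
   first byte.  */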
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
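
/* Editorial note (illustrative): the list built by these use_* routines
   ends up as CALL_INSN_FUNCTION_USAGE on the call insn, e.g.

     (expr_list (use (reg:SI 4))
                (expr_list (use (reg:SI 5)) ...))

   which tells dataflow that those hard registers are live into the
   call even though they are not operands of the call pattern.  */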
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! (memsetp
         ? SET_BY_PIECES_P (len, align)
         : STORE_BY_PIECES_P (len, align)))
    return 0;

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = optab_handler (mov_optab, mode)->insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
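
/* A minimal sketch of a CONSTFUN callback (a hypothetical example for
   illustration; the real callbacks live elsewhere, e.g.
   builtin_memset_gen_str in builtins.c):

     static rtx
     all_ones_str (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode)
     {
       return gen_int_mode (-1, mode);
     }

   can_store_by_pieces dry-runs the same (offset, mode) walk that
   store_by_pieces would perform, and fails if any constant the
   callback produces is not LEGITIMATE_CONSTANT_P.  */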
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (memsetp
              ? SET_BY_PIECES_P (len, align)
              : STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
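
/* Editorial note: the largest-mode-first scan above mirrors the one in
   can_store_by_pieces, and the two must stay in sync;
   can_store_by_pieces is the dry run that promises every
   (offset, mode) pair used here yields a legitimate constant.  */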
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (GET_CODE (size) == CONST_INT
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size))
    ;
  else
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);

  return NULL;
}
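
/* Illustrative call (hypothetical values): clearing a 32-byte BLKmode
   MEM with no hints degenerates to

     clear_storage_hints (mem, GEN_INT (32), BLOCK_OP_NORMAL, 0, -1);

   which is exactly what the clear_storage wrapper below passes.  */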
rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  return clear_storage_hints (object, size, method, 0, -1);
}
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into new pseudos into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (GET_CODE (val) != CONST_INT)
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3,
                               object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}
static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
                        unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = setmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx opsize, opchar;
          enum machine_mode char_mode;
          rtx last = get_last_insn ();
          rtx pat;

          opsize = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (opsize, mode))
            opsize = copy_to_mode_reg (mode, opsize);

          opchar = val;
          char_mode = insn_data[(int) code].operand[2].mode;
          if (char_mode != VOIDmode)
            {
              opchar = convert_to_mode (char_mode, opchar, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (opchar, char_mode))
                opchar = copy_to_mode_reg (char_mode, opchar);
            }

          if (insn_data[(int) code].n_operands == 4)
            pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
          else
            pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
                                        GEN_INT (expected_align
                                                 / BITS_PER_UNIT),
                                        GEN_INT (expected_size));
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
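
/* For illustration: a CONCAT complex value looks like

     (concat:SC (reg:SF 100) (reg:SF 101))

   so XEXP (cplx, 0) is the real part and XEXP (cplx, 1) the imaginary
   part, which is why the CONCAT cases here and in read_complex_part
   can access a component with a single XEXP.  */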
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode);
}
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
                       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode)->insn_code;
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              GEN_INT (adjust), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
                        - GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;
  return false;
}
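
/* Example (illustrative, assuming 32-bit words on a little-endian
   target): for (subreg:DI (reg:SI 100) 0), word 1 lies entirely in the
   undefined upper half of the paradoxical subreg, so this predicate
   returns true for I == 1 and emit_move_multi_word below emits no move
   for that word.  */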
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
         from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
        continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode)->insn_code;
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, false);
      if (ret)
        return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (LEGITIMATE_CONSTANT_P (y))
    oldcost = rtx_cost (y, SET);
  else
    oldcost = rtx_cost (force_const_mem (dstmode, y), SET);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      /* For CSE's benefit, force the compressed constant pool entry
         into a new pseudo.  This constant may be used in different modes,
         and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
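
/* Example (illustrative): moving the DFmode constant 1.0 into a
   register on a target with a cheap SFmode-to-DFmode extension can be
   emitted as an SFmode constant extended to DFmode, since 1.0
   truncates to SFmode exactly.  A constant such as 0.1 is changed by
   the truncation, so exact_real_truncate rejects it and the wide
   constant is moved as-is.  */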
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode)->insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    GEN_INT (rounded_size),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
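
/* For illustration: on a STACK_GROWS_DOWNWARD target with
   STACK_PUSH_CODE == PRE_DEC, the fallback MEM built above is roughly

     (mem:SI (pre_dec (reg: stack pointer)))

   and the move expander turns the store into an actual push
   instruction.  */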
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, int extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
        {
          /* A value is to be stored in an insufficiently aligned
             stack slot; copy via a suitably aligned slot if
             necessary.  */
          size = GEN_INT (GET_MODE_SIZE (mode));
          if (!MEM_P (xinner))
            {
              temp = assign_temp (type, 0, 1, 1);
              emit_move_insn (temp, xinner);
              xinner = temp;
            }
        }

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (dest, align);

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        {
          gcc_assert (partial % UNITS_PER_WORD == 0);
          move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
        }
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
          || x == 0
           /* Only registers can be subtargets.  */
           || !REG_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
          ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
                                 unsigned HOST_WIDE_INT bitpos,
                                 enum machine_mode mode1, rtx str_rtx,
                                 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (!BINARY_CLASS_P (src)
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);
  STRIP_NOPS (op0);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
        str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
                                MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
        return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (TREE_CODE (src))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
         where we don't need to do any masking and also
         1 bit bitfields where xor can be used.
         We might win by one instruction for the other bitfields
         too if insv/extv instructions aren't used, so that
         can be added later.  */
      if (bitpos + bitsize != str_bitsize
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
        break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
        {
          value = expand_and (str_mode, value, const1_rtx, NULL);
          binop = xor_optab;
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
      value = convert_modes (GET_MODE (str_rtx),
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
        {
          rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
                              - 1);
          value = expand_and (GET_MODE (str_rtx), value, mask,
                              NULL_RTX);
        }
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */
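/* For example (illustrative only), for

     struct { int x; char pad[8]; } rec;
     rec.x = 42;

   TO is a COMPONENT_REF, so the handled_component_p path below
   decomposes it with get_inner_reference and stores through
   store_field instead of a plain move into DECL_RTL.  */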
void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
  if (handled_component_p (to)
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, true);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      to_rtx = expand_normal (tem);

      if (offset != 0)
	{
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (offset_rtx) != Pmode)
	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	  if (GET_MODE (offset_rtx) != ptr_mode)
	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (MEM_P (to_rtx)
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* Handle expand_expr of a complex value returning a CONCAT.  */
      if (GET_CODE (to_rtx) == CONCAT)
	{
	  if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
	    {
	      gcc_assert (bitpos == 0);
	      result = store_expr (from, to_rtx, false, nontemporal);
	    }
	  else
	    {
	      gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
	      result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
				   nontemporal);
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);

	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);

	      /* Deal with volatile and readonly fields.  The former is only
		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	      if (component_uses_parent_alias_set (to))
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
					       to_rtx, to, from))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos, mode1, from,
				  TREE_TYPE (tem), get_alias_set (to),
				  nontemporal);
	}

      if (result)
	preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && REG_P (DECL_RTL (to))))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address (GET_MODE (to_rtx), value);
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TYPE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0, nontemporal);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return;
}
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */
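/* Whether such an insn exists is a target property: the storent_optab
   entry is only filled in by back ends that provide a nontemporal
   store pattern (streaming stores on some vector units, for example).
   Everywhere else this function simply fails and the caller falls back
   to an ordinary move.  */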
static bool
emit_storent_insn (rtx to, rtx from)
{
  enum machine_mode mode = GET_MODE (to), imode;
  enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
  rtx pattern;

  if (code == CODE_FOR_nothing)
    return false;

  imode = insn_data[code].operand[0].mode;
  if (!insn_data[code].operand[0].predicate (to, imode))
    return false;

  imode = insn_data[code].operand[1].mode;
  if (!insn_data[code].operand[1].predicate (from, imode))
    {
      from = copy_to_mode_reg (imode, from);
      if (!insn_data[code].operand[1].predicate (from, imode))
	return false;
    }

  pattern = GEN_FCN (code) (to, from);
  if (pattern == NULL_RTX)
    return false;

  emit_insn (pattern);
  return true;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.  */
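/* As a concrete illustration (not from a testcase), for

     char buf[16] = "abc";

   the STRING_CST path below can copy the four string bytes by pieces
   and clear the remaining twelve with clear_storage, instead of
   emitting a block move from a 16-byte constant.  */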
rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  int dont_return_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
			 nontemporal);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
		  nontemporal);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
		  nontemporal);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && GET_MODE_PRECISION (GET_MODE (target))
	     == TYPE_PRECISION (TREE_TYPE (exp)))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    {
	      /* Some types, e.g. Fortran's logical*4, won't have a signed
		 version, so use the mode instead.  */
	      tree ntype
		= (signed_or_unsigned_type_for
		   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
	      if (ntype == NULL)
		ntype = lang_hooks.types.type_for_mode
		  (TYPE_MODE (TREE_TYPE (exp)),
		   SUBREG_PROMOTED_UNSIGNED_P (target));

	      exp = fold_convert (ntype, exp);
	    }

	  exp = fold_convert (lang_hooks.types.type_for_mode
			      (GET_MODE (SUBREG_REG (target)),
			       SUBREG_PROMOTED_UNSIGNED_P (target)),
			      exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else if (TREE_CODE (exp) == STRING_CST
	   && !nontemporal && !call_param_p
	   && TREE_STRING_LENGTH (exp) > 0
	   && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
	goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (exp));
      if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
	goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (exp);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
	{
	  str_copy_len += STORE_MAX_PIECES - 1;
	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
	}
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
				CONST_CAST(char *, TREE_STRING_POINTER (exp)),
				MEM_ALIGN (target), false))
	goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
				  str_copy_len, builtin_strncpy_read_str,
				  CONST_CAST(char *, TREE_STRING_POINTER (exp)),
				  MEM_ALIGN (target), false,
				  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
	clear_storage (adjust_address (dest_mem, BLKmode, 0),
		       GEN_INT (exp_len - str_copy_len),
		       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal store, force the value to
	 register first.  */
      tmp_target = nontemporal ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && REG_P (target)
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && CONSTANT_P (temp))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else if (GET_MODE (target) == BLKmode
		   || GET_MODE (temp) == BLKmode)
	    emit_block_move (target, temp, expr_size (exp),
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM
			      : BLOCK_OP_NORMAL));
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, TREE_TYPE (exp),
			 int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else if (nontemporal
	       && emit_storent_insn (target, temp))
	/* If we managed to emit a nontemporal store, there is nothing else to
	   do.  */
	;
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_elt_count,
			    bool *p_must_clear)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, elt_count;
  tree value, purpose;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  elt_count = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
	    mult = (tree_low_cst (hi_index, 1)
		    - tree_low_cst (lo_index, 1) + 1);
	}

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, ic = 0;

	    bool const_elt_p
	      = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);

	    nz_elts += mult * nz;
	    elt_count += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  elt_count += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case VECTOR_CST:
	  {
	    tree v;
	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
	      {
		if (!initializer_zerop (TREE_VALUE (v)))
		  nz_elts += mult;
		elt_count += mult;
	      }
	  }
	  break;

	default:
	  nz_elts += mult;
	  elt_count += mult;

	  if (const_from_elts_p && const_p)
	    const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
		      != NULL_TREE;
	  break;
	}
    }

  if (!*p_must_clear
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
    {
      tree init_sub_type;
      bool clear_this = true;

      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
	{
	  /* We don't expect more than one element of the union to be
	     initialized.  Not sure what we should do otherwise... */
	  gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
		      == 1);

	  init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
						CONSTRUCTOR_ELTS (ctor),
						0)->value);

	  /* ??? We could look at each element of the union, and find the
	     largest element.  Which would avoid comparing the size of the
	     initialized element against any tail padding in the union.
	     Doesn't seem worth the effort...  */
	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
				TYPE_SIZE (init_sub_type)) == 1)
	    {
	      /* And now we have to find out if the element itself is fully
		 constructed.  E.g. for union { struct { int a, b; } s; } u
		 = { .s = { .a = 1 } }.  */
	      if (elt_count == count_type_elements (init_sub_type, false))
		clear_this = false;
	    }
	}

      *p_must_clear = clear_this;
    }

  *p_nz_elts += nz_elts;
  *p_elt_count += elt_count;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * if a type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *p_must_clear.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_elt_count,
			  bool *p_must_clear)
{
  *p_nz_elts = 0;
  *p_elt_count = 0;
  *p_must_clear = false;

  return
    categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
}
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
   array member at the end of the structure.  */
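/* E.g. (illustrative)

     struct P { double xy[2]; _Complex float z; };

   counts as 4 scalars: two array elements plus the real and imaginary
   parts of the complex member.  */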
HOST_WIDE_INT
count_type_elements (const_tree type, bool allow_flexarr)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree telts = array_type_nelts (type);
	if (telts && host_integerp (telts, 1))
	  {
	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);

	    if (n == 0)
	      return 0;
	    else if (max / n > m)
	      return n * m;
	  }
	return -1;
      }

    case RECORD_TYPE:
      {
	HOST_WIDE_INT n = 0, t;
	tree f;

	for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      t = count_type_elements (TREE_TYPE (f), false);
	      if (t < 0)
		{
		  /* Check for structures with flexible array member.  */
		  tree tf = TREE_TYPE (f);
		  if (allow_flexarr
		      && TREE_CHAIN (f) == NULL
		      && TREE_CODE (tf) == ARRAY_TYPE
		      && TYPE_DOMAIN (tf)
		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
		      && int_size_in_bytes (type) >= 0)
		    break;

		  return -1;
		}
	      n += t;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return -1;

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    case VOID_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case LANG_TYPE:
    default:
      gcc_unreachable ();
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */
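/* E.g. (illustrative) "int v[8] = { 0, 0, 1 };" is mostly zeros:
   one nonzero element against the eight scalars of the type.  */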
static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      if (must_clear)
	return 1;

      elts = count_type_elements (TREE_TYPE (exp), false);

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
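/* The shortcut matters for nested aggregates such as (illustrative)

     struct in { int a, b; };
     struct out { struct in i; int c; } o = { { 0, 0 }, 1 };

   where the outer store_constructor may clear all of O once and the
   recursive call for O.I can then skip clearing its substructure.  */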
static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared,
			 alias_set_type alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
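/* For a mostly-zero initializer such as (illustrative)

     int t[100] = { [3] = 7 };

   the cases below first clear the whole object and then store only the
   nonzero elements, instead of storing all 100 of them.  */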
static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
		      != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (REG_P (target) && !cleared)
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (host_integerp (DECL_SIZE (field), 1))
	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
	    else
	      bitsize = -1;

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (host_integerp (offset, 0)
		&& host_integerp (bit_position (field), 0))
	      {
		bitpos = int_bit_position (field);
		offset = 0;
	      }
	    else
	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	    if (offset)
	      {
		rtx offset_rtx;

		offset
		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						    make_tree (TREE_TYPE (exp),
							       target));
		offset_rtx = expand_normal (offset);
		gcc_assert (MEM_P (to_rtx));

#ifdef POINTERS_EXTEND_UNSIGNED
		if (GET_MODE (offset_rtx) != Pmode)
		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
		if (GET_MODE (offset_rtx) != ptr_mode)
		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

		to_rtx = offset_address (to_rtx, offset_rtx,
					 highest_pow2_factor (offset));
	      }

#ifdef WORD_REGISTER_OPERATIONS
	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_size
		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }
#endif

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
				     value, type, cleared,
				     get_alias_set (TREE_TYPE (field)));
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
			  && host_integerp (TYPE_MAX_VALUE (domain), 0));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! host_integerp (lo_index, 1)
			|| ! host_integerp (hi_index, 1))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_low_cst (hi_index, 1)
				       - tree_low_cst (lo_index, 1) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    int unsignedp;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    unsignedp = TYPE_UNSIGNED (elttype);
	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && host_integerp (lo_index, 0)
		    && host_integerp (hi_index, 0)
		    && (lo = tree_low_cst (lo_index, 0),
			hi = tree_low_cst (hi_index, 0),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (host_integerp (TYPE_SIZE (elttype), 1)
			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, type, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx loop_start = gen_label_rtx ();
		    rtx loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);
		    unsignedp = TYPE_UNSIGNED (domain);

		    index = build_decl (VAR_DECL, NULL_TREE, domain);

		    index_r
		      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						   &unsignedp, 0));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
			size_binop (MULT_EXPR, position,
				    fold_convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0, false);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! host_integerp (index, 0))
		     || ! host_integerp (TYPE_SIZE (elttype), 1))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_low_cst (index, 0) - minelt)
			    * tree_low_cst (TYPE_SIZE (elttype), 1));
		else
		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 type, cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = 0;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_low_cst
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype), 0), 1);

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_low_cst (ce->index, 1);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* Vector CONSTRUCTORs should only be built from smaller
		   vectors in the case of BLKmode vectors.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos,
					 value_mode, value, type,
					 cleared, get_alias_set (elttype));
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */
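/* E.g. (illustrative) for "struct { int x : 5; } s; s.x = v;" this is
   reached with BITSIZE 5 and MODE VOIDmode, and the value is written
   with store_bit_field below.  */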
static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     enum machine_mode mode, tree exp, tree type,
	     alias_set_type alias_set, bool nontemporal)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
		   nontemporal);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      if (TREE_CODE (exp) == NOP_EXPR)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = TREE_OPERAND (exp, 0);
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TEMP and TARGET are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  Likewise
	 for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && (bitpos % BITS_PER_UNIT) == 0
		  && (bitsize % BITS_PER_UNIT) == 0)))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && (bitpos % BITS_PER_UNIT) == 0);

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0, nontemporal);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */
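/* As an illustrative example, for "s.a[i].f" with a variable index I,
   this returns the decl "s", sets *PBITPOS to the constant part of the
   displacement of "f", and leaves the variable part, essentially
   "i * sizeof (s.a[0])" in units, in *POFFSET.  */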
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (!DECL_BIT_FIELD (field))
	mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
				     DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
			         size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   bitsize_int (*pbitsize));
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (host_integerp (offset, 0))
    {
      double_int tem = double_int_mul (tree_to_double_int (offset),
				       uhwi_to_double_int (BITS_PER_UNIT));
      tem = double_int_add (tem, tree_to_double_int (bit_offset));
      if (double_int_fits_in_shwi_p (tem))
	{
	  *pbitpos = double_int_to_shwi (tem);
	  *poffset = offset = NULL_TREE;
	}
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      *pbitpos = tree_low_cst (bit_offset, 0);
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
/* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
   look for whether EXP or any nested component-refs within EXP is marked
   as PACKED.  */

bool
contains_packed_reference (const_tree exp)
{
  bool packed_p = false;

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    packed_p = DECL_PACKED (field)
		       || TYPE_PACKED (TREE_TYPE (field))
		       || TYPE_PACKED (TREE_TYPE (exp));
	    if (packed_p)
	      goto done;
	  }
	  break;

	case BIT_FIELD_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  break;

	default:
	  goto done;
	}
      exp = TREE_OPERAND (exp, 0);
    }
 done:
  return packed_p;
}
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert (sizetype, aligned_size);
      return size_binop (MULT_EXPR, aligned_size,
			 size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert (sizetype, aligned_offset);
      return size_binop (MULT_EXPR, aligned_offset,
			 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return 1;

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to
     be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  for (idx = 0;
	       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
	       idx++)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL and that address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();

    case tcc_gimple_stmt:
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
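/* Example: when expanding a hypothetical assignment  x = x + f ()  with
   X already living in memory or in a hard register, the CALL_EXPR case
   above makes safe_from_p (x, <the call>, 0) return 0, since a call may
   clobber any hard register or any of memory; the caller then evaluates
   the call into a temporary instead of directly into X.  */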
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case BIT_AND_EXPR:
      /* The highest power of two of a bit-and expression is the maximum of
	 that of its operands.  We typically get here for a complex LHS and
	 a constant negative power of two on the RHS to force an explicit
	 alignment, so don't bother looking at the LHS.  */
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
  else
    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
  return MAX (factor, target_align);
}
/* Return &VAR expression for emulated thread local VAR.  */

static tree
emutls_var_address (tree var)
{
  tree emuvar = emutls_decl (var);
  tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
  tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
  tree arglist = build_tree_list (NULL_TREE, arg);
  tree call = build_function_call_expr (fn, arglist);
  return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     at top level.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    return XEXP (expand_expr_constant (exp, 0, modifier), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case CONST_DECL:
      /* Recurse and make the output_constant_def clause above handle this.  */
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
				      tmode, modifier);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case VAR_DECL:
      /* TLS emulation hook - replace __thread VAR's &VAR with
	 __emutls_get_address (&_emutls.VAR).  */
      if (! targetm.have_tls
	  && TREE_CODE (exp) == VAR_DECL
	  && DECL_THREAD_LOCAL_P (exp))
	{
	  exp = emutls_var_address (exp);
	  return expand_expr (exp, target, tmode, modifier);
	}
      /* Fall through.  */

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */
	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
	    {
	      assemble_external (exp);
	      TREE_USED (exp) = 1;
	    }

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS)
	    result = force_operand (result, target);
	  return result;
	}
      break;
    }

  /* Pass FALSE as the last argument to get_inner_reference although
     we are expanding to RTL.  The rationale is that we know how to
     handle "aligning nodes" here: we can just bypass them because
     they won't change the final object whose address will be returned
     (they actually exist only for that purpose).  */
  inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			       &mode1, &unsignedp, &volatilep, false);

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      result = convert_memory_address (tmode, result);
      tmp = convert_memory_address (tmode, tmp);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = gen_rtx_PLUS (tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address (tmode, result);

  return result;
}
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  enum machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT or CONST_DOUBLE when we
     fold.  Likewise, if we have a target we can use, it is best to
     store directly into the target unless the type is large enough
     that memcpy will be used.  If we are making an initializer and
     all operands are constant, put it in memory as well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (host_integerp (TYPE_SIZE_UNIT (type), 1)
	       && (! MOVE_BY_PIECES_P
		   (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target
	= assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
		       0, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp));
  return target;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   Intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */

static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (exp) == PREDICT_EXPR
      || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    {
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
    }

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;

      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						REG_NOTES (insn));
	}
    }

  return ret;
}
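/* Example: with -fnon-call-exceptions, expanding a potentially trapping
   division or memory reference runs through the loop above, which
   attaches a REG_EH_REGION note carrying the statement's EH region
   number RN to every insn for which may_trap_p holds, so later RTL
   passes keep those insns inside the right exception region.  */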
7116 expand_expr_real_1 (tree exp
, rtx target
, enum machine_mode tmode
,
7117 enum expand_modifier modifier
, rtx
*alt_rtl
)
7119 rtx op0
, op1
, op2
, temp
, decl_rtl
;
7122 enum machine_mode mode
;
7123 enum tree_code code
= TREE_CODE (exp
);
7125 rtx subtarget
, original_target
;
7127 tree context
, subexp0
, subexp1
;
7128 bool reduce_bit_field
;
7129 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7130 ? reduce_to_bit_field_precision ((expr), \
7135 if (GIMPLE_STMT_P (exp
))
7137 type
= void_type_node
;
7143 type
= TREE_TYPE (exp
);
7144 mode
= TYPE_MODE (type
);
7145 unsignedp
= TYPE_UNSIGNED (type
);
7148 ignore
= (target
== const0_rtx
7149 || ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
7150 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
7151 && TREE_CODE (type
) == VOID_TYPE
));
7153 /* An operation in what may be a bit-field type needs the
7154 result to be reduced to the precision of the bit-field type,
7155 which is narrower than that of the type's mode. */
7156 reduce_bit_field
= (!ignore
7157 && TREE_CODE (type
) == INTEGER_TYPE
7158 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
7160 /* If we are going to ignore this result, we need only do something
7161 if there is a side-effect somewhere in the expression. If there
7162 is, short-circuit the most common cases here. Note that we must
7163 not call expand_expr with anything but const0_rtx in case this
7164 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7168 if (! TREE_SIDE_EFFECTS (exp
))
7171 /* Ensure we reference a volatile object even if value is ignored, but
7172 don't do this if all we are doing is taking its address. */
7173 if (TREE_THIS_VOLATILE (exp
)
7174 && TREE_CODE (exp
) != FUNCTION_DECL
7175 && mode
!= VOIDmode
&& mode
!= BLKmode
7176 && modifier
!= EXPAND_CONST_ADDRESS
)
7178 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
7180 temp
= copy_to_reg (temp
);
7184 if (TREE_CODE_CLASS (code
) == tcc_unary
7185 || code
== COMPONENT_REF
|| code
== INDIRECT_REF
)
7186 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
7189 else if (TREE_CODE_CLASS (code
) == tcc_binary
7190 || TREE_CODE_CLASS (code
) == tcc_comparison
7191 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
7193 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
7194 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
7197 else if (code
== BIT_FIELD_REF
)
7199 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
7200 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
7201 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
7208 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
7211 /* Use subtarget as the target for operand 0 of a binary operation. */
7212 subtarget
= get_subtarget (target
);
7213 original_target
= target
;
7219 tree function
= decl_function_context (exp
);
7221 temp
= label_rtx (exp
);
7222 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
7224 if (function
!= current_function_decl
7226 LABEL_REF_NONLOCAL_P (temp
) = 1;
7228 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
7233 return expand_expr_real_1 (SSA_NAME_VAR (exp
), target
, tmode
, modifier
,
7238 /* If a static var's type was incomplete when the decl was written,
7239 but the type is complete now, lay out the decl now. */
7240 if (DECL_SIZE (exp
) == 0
7241 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
7242 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
7243 layout_decl (exp
, 0);
7245 /* TLS emulation hook - replace __thread vars with
7246 *__emutls_get_address (&_emutls.var). */
7247 if (! targetm
.have_tls
7248 && TREE_CODE (exp
) == VAR_DECL
7249 && DECL_THREAD_LOCAL_P (exp
))
7251 exp
= build_fold_indirect_ref (emutls_var_address (exp
));
7252 return expand_expr_real_1 (exp
, target
, tmode
, modifier
, NULL
);
7255 /* ... fall through ... */
7259 decl_rtl
= DECL_RTL (exp
);
7260 gcc_assert (decl_rtl
);
7261 decl_rtl
= copy_rtx (decl_rtl
);
7263 /* Ensure variable marked as used even if it doesn't go through
7264 a parser. If it hasn't be used yet, write out an external
7266 if (! TREE_USED (exp
))
7268 assemble_external (exp
);
7269 TREE_USED (exp
) = 1;
7272 /* Show we haven't gotten RTL for this yet. */
7275 /* Variables inherited from containing functions should have
7276 been lowered by this point. */
7277 context
= decl_function_context (exp
);
7278 gcc_assert (!context
7279 || context
== current_function_decl
7280 || TREE_STATIC (exp
)
7281 /* ??? C++ creates functions that are not TREE_STATIC. */
7282 || TREE_CODE (exp
) == FUNCTION_DECL
);
7284 /* This is the case of an array whose size is to be determined
7285 from its initializer, while the initializer is still being parsed.
7288 if (MEM_P (decl_rtl
) && REG_P (XEXP (decl_rtl
, 0)))
7289 temp
= validize_mem (decl_rtl
);
7291 /* If DECL_RTL is memory, we are in the normal case and the
7292 address is not valid, get the address into a register. */
7294 else if (MEM_P (decl_rtl
) && modifier
!= EXPAND_INITIALIZER
)
7297 *alt_rtl
= decl_rtl
;
7298 decl_rtl
= use_anchored_address (decl_rtl
);
7299 if (modifier
!= EXPAND_CONST_ADDRESS
7300 && modifier
!= EXPAND_SUM
7301 && !memory_address_p (DECL_MODE (exp
), XEXP (decl_rtl
, 0)))
7302 temp
= replace_equiv_address (decl_rtl
,
7303 copy_rtx (XEXP (decl_rtl
, 0)));
7306 /* If we got something, return it. But first, set the alignment
7307 if the address is a register. */
7310 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
7311 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
7316 /* If the mode of DECL_RTL does not match that of the decl, it
7317 must be a promoted value. We return a SUBREG of the wanted mode,
7318 but mark it so that we know that it was already extended. */
7320 if (REG_P (decl_rtl
)
7321 && GET_MODE (decl_rtl
) != DECL_MODE (exp
))
7323 enum machine_mode pmode
;
7325 /* Get the signedness used for this variable. Ensure we get the
7326 same mode we got when the variable was declared. */
7327 pmode
= promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
7328 (TREE_CODE (exp
) == RESULT_DECL
7329 || TREE_CODE (exp
) == PARM_DECL
) ? 1 : 0);
7330 gcc_assert (GET_MODE (decl_rtl
) == pmode
);
7332 temp
= gen_lowpart_SUBREG (mode
, decl_rtl
);
7333 SUBREG_PROMOTED_VAR_P (temp
) = 1;
7334 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
7341 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
7342 TREE_INT_CST_HIGH (exp
), mode
);
7348 tree tmp
= NULL_TREE
;
7349 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
7350 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
7351 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
7352 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
7353 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
7354 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
7355 return const_vector_from_tree (exp
);
7356 if (GET_MODE_CLASS (mode
) == MODE_INT
)
7358 tree type_for_mode
= lang_hooks
.types
.type_for_mode (mode
, 1);
7360 tmp
= fold_unary (VIEW_CONVERT_EXPR
, type_for_mode
, exp
);
7363 tmp
= build_constructor_from_list (type
,
7364 TREE_VECTOR_CST_ELTS (exp
));
7365 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
7370 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
7373 /* If optimized, generate immediate CONST_DOUBLE
7374 which will be turned into memory by reload if necessary.
7376 We used to force a register so that loop.c could see it. But
7377 this does not allow gen_* patterns to perform optimizations with
7378 the constants. It also produces two insns in cases like "x = 1.0;".
7379 On most machines, floating-point constants are not permitted in
7380 many insns, so we'd end up copying it to a register in any case.
7382 Now, we do the copying in expand_binop, if appropriate. */
7383 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
7384 TYPE_MODE (TREE_TYPE (exp
)));
7387 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
7388 TYPE_MODE (TREE_TYPE (exp
)));
7391 /* Handle evaluating a complex constant in a CONCAT target. */
7392 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
7394 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
7397 rtarg
= XEXP (original_target
, 0);
7398 itarg
= XEXP (original_target
, 1);
7400 /* Move the real and imaginary parts separately. */
7401 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
7402 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
7405 emit_move_insn (rtarg
, op0
);
7407 emit_move_insn (itarg
, op1
);
7409 return original_target
;
7412 /* ... fall through ... */
7415 temp
= expand_expr_constant (exp
, 1, modifier
);
7417 /* temp contains a constant address.
7418 On RISC machines where a constant address isn't valid,
7419 make some insns to get that address into a register. */
7420 if (modifier
!= EXPAND_CONST_ADDRESS
7421 && modifier
!= EXPAND_INITIALIZER
7422 && modifier
!= EXPAND_SUM
7423 && ! memory_address_p (mode
, XEXP (temp
, 0)))
7424 return replace_equiv_address (temp
,
7425 copy_rtx (XEXP (temp
, 0)));
7430 tree val
= TREE_OPERAND (exp
, 0);
7431 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
);
7433 if (!SAVE_EXPR_RESOLVED_P (exp
))
7435 /* We can indeed still hit this case, typically via builtin
7436 expanders calling save_expr immediately before expanding
7437 something. Assume this means that we only have to deal
7438 with non-BLKmode values. */
7439 gcc_assert (GET_MODE (ret
) != BLKmode
);
7441 val
= build_decl (VAR_DECL
, NULL
, TREE_TYPE (exp
));
7442 DECL_ARTIFICIAL (val
) = 1;
7443 DECL_IGNORED_P (val
) = 1;
7444 TREE_OPERAND (exp
, 0) = val
;
7445 SAVE_EXPR_RESOLVED_P (exp
) = 1;
7447 if (!CONSTANT_P (ret
))
7448 ret
= copy_to_reg (ret
);
7449 SET_DECL_RTL (val
, ret
);
7456 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
7457 expand_goto (TREE_OPERAND (exp
, 0));
7459 expand_computed_goto (TREE_OPERAND (exp
, 0));
7463 /* If we don't need the result, just ensure we evaluate any
7467 unsigned HOST_WIDE_INT idx
;
7470 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
7471 expand_expr (value
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
7476 return expand_constructor (exp
, target
, modifier
, false);
7478 case MISALIGNED_INDIRECT_REF
:
7479 case ALIGN_INDIRECT_REF
:
7482 tree exp1
= TREE_OPERAND (exp
, 0);
7484 if (modifier
!= EXPAND_WRITE
)
7488 t
= fold_read_from_constant_string (exp
);
7490 return expand_expr (t
, target
, tmode
, modifier
);
7493 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
7494 op0
= memory_address (mode
, op0
);
7496 if (code
== ALIGN_INDIRECT_REF
)
7498 int align
= TYPE_ALIGN_UNIT (type
);
7499 op0
= gen_rtx_AND (Pmode
, op0
, GEN_INT (-align
));
7500 op0
= memory_address (mode
, op0
);
7503 temp
= gen_rtx_MEM (mode
, op0
);
7505 set_mem_attributes (temp
, exp
, 0);
7507 /* Resolve the misalignment now, so that we don't have to remember
7508 to resolve it later. Of course, this only works for reads. */
7509 /* ??? When we get around to supporting writes, we'll have to handle
7510 this in store_expr directly. The vectorizer isn't generating
7511 those yet, however. */
7512 if (code
== MISALIGNED_INDIRECT_REF
)
7517 gcc_assert (modifier
== EXPAND_NORMAL
7518 || modifier
== EXPAND_STACK_PARM
);
7520 /* The vectorizer should have already checked the mode. */
7521 icode
= optab_handler (movmisalign_optab
, mode
)->insn_code
;
7522 gcc_assert (icode
!= CODE_FOR_nothing
);
7524 /* We've already validated the memory, and we're creating a
7525 new pseudo destination. The predicates really can't fail. */
7526 reg
= gen_reg_rtx (mode
);
7528 /* Nor can the insn generator. */
7529 insn
= GEN_FCN (icode
) (reg
, temp
);
7538 case TARGET_MEM_REF
:
7540 struct mem_address addr
;
7542 get_address_description (exp
, &addr
);
7543 op0
= addr_for_mem_ref (&addr
, true);
7544 op0
= memory_address (mode
, op0
);
7545 temp
= gen_rtx_MEM (mode
, op0
);
7546 set_mem_attributes (temp
, TMR_ORIGINAL (exp
), 0);
7553 tree array
= TREE_OPERAND (exp
, 0);
7554 tree index
= TREE_OPERAND (exp
, 1);
7556 /* Fold an expression like: "foo"[2].
7557 This is not done in fold so it won't happen inside &.
7558 Don't fold if this is for wide characters since it's too
7559 difficult to do correctly and this is a very rare case. */
7561 if (modifier
!= EXPAND_CONST_ADDRESS
7562 && modifier
!= EXPAND_INITIALIZER
7563 && modifier
!= EXPAND_MEMORY
)
7565 tree t
= fold_read_from_constant_string (exp
);
7568 return expand_expr (t
, target
, tmode
, modifier
);
7571 /* If this is a constant index into a constant array,
7572 just get the value from the array. Handle both the cases when
7573 we have an explicit constructor and when our operand is a variable
7574 that was declared const. */
7576 if (modifier
!= EXPAND_CONST_ADDRESS
7577 && modifier
!= EXPAND_INITIALIZER
7578 && modifier
!= EXPAND_MEMORY
7579 && TREE_CODE (array
) == CONSTRUCTOR
7580 && ! TREE_SIDE_EFFECTS (array
)
7581 && TREE_CODE (index
) == INTEGER_CST
)
7583 unsigned HOST_WIDE_INT ix
;
7586 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
7588 if (tree_int_cst_equal (field
, index
))
7590 if (!TREE_SIDE_EFFECTS (value
))
7591 return expand_expr (fold (value
), target
, tmode
, modifier
);
7596 else if (optimize
>= 1
7597 && modifier
!= EXPAND_CONST_ADDRESS
7598 && modifier
!= EXPAND_INITIALIZER
7599 && modifier
!= EXPAND_MEMORY
7600 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
7601 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
7602 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
7603 && targetm
.binds_local_p (array
))
7605 if (TREE_CODE (index
) == INTEGER_CST
)
7607 tree init
= DECL_INITIAL (array
);
7609 if (TREE_CODE (init
) == CONSTRUCTOR
)
7611 unsigned HOST_WIDE_INT ix
;
7614 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
7616 if (tree_int_cst_equal (field
, index
))
7618 if (TREE_SIDE_EFFECTS (value
))
7621 if (TREE_CODE (value
) == CONSTRUCTOR
)
7623 /* If VALUE is a CONSTRUCTOR, this
7624 optimization is only useful if
7625 this doesn't store the CONSTRUCTOR
7626 into memory. If it does, it is more
7627 efficient to just load the data from
7628 the array directly. */
7629 rtx ret
= expand_constructor (value
, target
,
7631 if (ret
== NULL_RTX
)
7635 return expand_expr (fold (value
), target
, tmode
,
7639 else if(TREE_CODE (init
) == STRING_CST
)
7641 tree index1
= index
;
7642 tree low_bound
= array_ref_low_bound (exp
);
7643 index1
= fold_convert (sizetype
, TREE_OPERAND (exp
, 1));
7645 /* Optimize the special-case of a zero lower bound.
7647 We convert the low_bound to sizetype to avoid some problems
7648 with constant folding. (E.g. suppose the lower bound is 1,
7649 and its mode is QI. Without the conversion,l (ARRAY
7650 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7651 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
7653 if (! integer_zerop (low_bound
))
7654 index1
= size_diffop (index1
, fold_convert (sizetype
,
7657 if (0 > compare_tree_int (index1
,
7658 TREE_STRING_LENGTH (init
)))
7660 tree type
= TREE_TYPE (TREE_TYPE (init
));
7661 enum machine_mode mode
= TYPE_MODE (type
);
7663 if (GET_MODE_CLASS (mode
) == MODE_INT
7664 && GET_MODE_SIZE (mode
) == 1)
7665 return gen_int_mode (TREE_STRING_POINTER (init
)
7666 [TREE_INT_CST_LOW (index1
)],
7673 goto normal_inner_ref
;
7676 /* If the operand is a CONSTRUCTOR, we can just extract the
7677 appropriate field if it is present. */
7678 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
7680 unsigned HOST_WIDE_INT idx
;
7683 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
7685 if (field
== TREE_OPERAND (exp
, 1)
7686 /* We can normally use the value of the field in the
7687 CONSTRUCTOR. However, if this is a bitfield in
7688 an integral mode that we can fit in a HOST_WIDE_INT,
7689 we must mask only the number of bits in the bitfield,
7690 since this is done implicitly by the constructor. If
7691 the bitfield does not meet either of those conditions,
7692 we can't do this optimization. */
7693 && (! DECL_BIT_FIELD (field
)
7694 || ((GET_MODE_CLASS (DECL_MODE (field
)) == MODE_INT
)
7695 && (GET_MODE_BITSIZE (DECL_MODE (field
))
7696 <= HOST_BITS_PER_WIDE_INT
))))
7698 if (DECL_BIT_FIELD (field
)
7699 && modifier
== EXPAND_STACK_PARM
)
7701 op0
= expand_expr (value
, target
, tmode
, modifier
);
7702 if (DECL_BIT_FIELD (field
))
7704 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
7705 enum machine_mode imode
= TYPE_MODE (TREE_TYPE (field
));
7707 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
7709 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7710 op0
= expand_and (imode
, op0
, op1
, target
);
7715 = build_int_cst (NULL_TREE
,
7716 GET_MODE_BITSIZE (imode
) - bitsize
);
7718 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7720 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7728 goto normal_inner_ref
;
7731 case ARRAY_RANGE_REF
:
7734 enum machine_mode mode1
;
7735 HOST_WIDE_INT bitsize
, bitpos
;
7738 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7739 &mode1
, &unsignedp
, &volatilep
, true);
7742 /* If we got back the original object, something is wrong. Perhaps
7743 we are evaluating an expression too early. In any event, don't
7744 infinitely recurse. */
7745 gcc_assert (tem
!= exp
);
7747 /* If TEM's type is a union of variable size, pass TARGET to the inner
7748 computation, since it will need a temporary and TARGET is known
7749 to have to do. This occurs in unchecked conversion in Ada. */
7753 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7754 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7756 && modifier
!= EXPAND_STACK_PARM
7757 ? target
: NULL_RTX
),
7759 (modifier
== EXPAND_INITIALIZER
7760 || modifier
== EXPAND_CONST_ADDRESS
7761 || modifier
== EXPAND_STACK_PARM
)
7762 ? modifier
: EXPAND_NORMAL
);
7764 /* If this is a constant, put it into a register if it is a legitimate
7765 constant, OFFSET is 0, and we won't try to extract outside the
7766 register (in case we were passed a partially uninitialized object
7767 or a view_conversion to a larger size) or a BLKmode piece of it
7768 (e.g. if it is unchecked-converted to a record type in Ada). Force
7769 the constant to memory otherwise. */
7770 if (CONSTANT_P (op0
))
7772 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7773 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7776 && bitpos
+ bitsize
<= GET_MODE_BITSIZE (mode
))
7777 op0
= force_reg (mode
, op0
);
7779 op0
= validize_mem (force_const_mem (mode
, op0
));
7782 /* Otherwise, if this object not in memory and we either have an
7783 offset, a BLKmode result, or a reference outside the object, put it
7784 there. Such cases can occur in Ada if we have unchecked conversion
7785 of an expression from a scalar type to an array or record type or
7786 for an ARRAY_RANGE_REF whose type is BLKmode. */
7787 else if (!MEM_P (op0
)
7790 || (bitpos
+ bitsize
7791 > GET_MODE_BITSIZE (GET_MODE (op0
)))))
7793 tree nt
= build_qualified_type (TREE_TYPE (tem
),
7794 (TYPE_QUALS (TREE_TYPE (tem
))
7795 | TYPE_QUAL_CONST
));
7796 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7798 emit_move_insn (memloc
, op0
);
7804 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7807 gcc_assert (MEM_P (op0
));
7809 #ifdef POINTERS_EXTEND_UNSIGNED
7810 if (GET_MODE (offset_rtx
) != Pmode
)
7811 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7813 if (GET_MODE (offset_rtx
) != ptr_mode
)
7814 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7817 if (GET_MODE (op0
) == BLKmode
7818 /* A constant address in OP0 can have VOIDmode, we must
7819 not try to call force_reg in that case. */
7820 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7822 && (bitpos
% bitsize
) == 0
7823 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7824 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7826 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7830 op0
= offset_address (op0
, offset_rtx
,
7831 highest_pow2_factor (offset
));
7834 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7835 record its alignment as BIGGEST_ALIGNMENT. */
7836 if (MEM_P (op0
) && bitpos
== 0 && offset
!= 0
7837 && is_aligning_offset (offset
, tem
))
7838 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7840 /* Don't forget about volatility even if this is a bitfield. */
7841 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
7843 if (op0
== orig_op0
)
7844 op0
= copy_rtx (op0
);
7846 MEM_VOLATILE_P (op0
) = 1;
7849 /* The following code doesn't handle CONCAT.
7850 Assume only bitpos == 0 can be used for CONCAT, due to
7851 one element arrays having the same mode as its element. */
7852 if (GET_CODE (op0
) == CONCAT
)
7854 gcc_assert (bitpos
== 0
7855 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)));
7859 /* In cases where an aligned union has an unaligned object
7860 as a field, we might be extracting a BLKmode value from
7861 an integer-mode (e.g., SImode) object. Handle this case
7862 by doing the extract into an object as wide as the field
7863 (which we know to be the width of a basic mode), then
7864 storing into memory, and changing the mode to BLKmode. */
7865 if (mode1
== VOIDmode
7866 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
7867 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7868 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7869 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7870 && modifier
!= EXPAND_CONST_ADDRESS
7871 && modifier
!= EXPAND_INITIALIZER
)
7872 /* If the field isn't aligned enough to fetch as a memref,
7873 fetch it as a bit field. */
7874 || (mode1
!= BLKmode
7875 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7876 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
7878 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
7879 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
7880 && ((modifier
== EXPAND_CONST_ADDRESS
7881 || modifier
== EXPAND_INITIALIZER
)
7883 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7884 || (bitpos
% BITS_PER_UNIT
!= 0)))
7885 /* If the type and the field are a constant size and the
7886 size of the type isn't the same size as the bitfield,
7887 we must use bitfield operations. */
7889 && TYPE_SIZE (TREE_TYPE (exp
))
7890 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
7891 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7894 enum machine_mode ext_mode
= mode
;
7896 if (ext_mode
== BLKmode
7897 && ! (target
!= 0 && MEM_P (op0
)
7899 && bitpos
% BITS_PER_UNIT
== 0))
7900 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7902 if (ext_mode
== BLKmode
)
7905 target
= assign_temp (type
, 0, 1, 1);
7910 /* In this case, BITPOS must start at a byte boundary and
7911 TARGET, if specified, must be a MEM. */
7912 gcc_assert (MEM_P (op0
)
7913 && (!target
|| MEM_P (target
))
7914 && !(bitpos
% BITS_PER_UNIT
));
7916 emit_block_move (target
,
7917 adjust_address (op0
, VOIDmode
,
7918 bitpos
/ BITS_PER_UNIT
),
7919 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7921 (modifier
== EXPAND_STACK_PARM
7922 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7927 op0
= validize_mem (op0
);
7929 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
7930 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7932 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7933 (modifier
== EXPAND_STACK_PARM
7934 ? NULL_RTX
: target
),
7935 ext_mode
, ext_mode
);
7937 /* If the result is a record type and BITSIZE is narrower than
7938 the mode of OP0, an integral mode, and this is a big endian
7939 machine, we must put the field into the high-order bits. */
7940 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7941 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7942 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7943 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7944 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7948 /* If the result type is BLKmode, store the data into a temporary
7949 of the appropriate type, but with the mode corresponding to the
7950 mode for the data we have (op0's mode). It's tempting to make
7951 this a constant type, since we know it's only being stored once,
7952 but that can cause problems if we are taking the address of this
7953 COMPONENT_REF because the MEM of any reference via that address
7954 will have flags corresponding to the type, which will not
7955 necessarily be constant. */
7956 if (mode
== BLKmode
)
7958 HOST_WIDE_INT size
= GET_MODE_BITSIZE (ext_mode
);
7961 /* If the reference doesn't use the alias set of its type,
7962 we cannot create the temporary using that type. */
7963 if (component_uses_parent_alias_set (exp
))
7965 new = assign_stack_local (ext_mode
, size
, 0);
7966 set_mem_alias_set (new, get_alias_set (exp
));
7969 new = assign_stack_temp_for_type (ext_mode
, size
, 0, type
);
7971 emit_move_insn (new, op0
);
7972 op0
= copy_rtx (new);
7973 PUT_MODE (op0
, BLKmode
);
7974 set_mem_attributes (op0
, exp
, 1);
7980 /* If the result is BLKmode, use that to access the object
7982 if (mode
== BLKmode
)
7985 /* Get a reference to just this component. */
7986 if (modifier
== EXPAND_CONST_ADDRESS
7987 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7988 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7990 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7992 if (op0
== orig_op0
)
7993 op0
= copy_rtx (op0
);
7995 set_mem_attributes (op0
, exp
, 0);
7996 if (REG_P (XEXP (op0
, 0)))
7997 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7999 MEM_VOLATILE_P (op0
) |= volatilep
;
8000 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
8001 || modifier
== EXPAND_CONST_ADDRESS
8002 || modifier
== EXPAND_INITIALIZER
)
8004 else if (target
== 0)
8005 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8007 convert_move (target
, op0
, unsignedp
);
8012 return expand_expr (OBJ_TYPE_REF_EXPR (exp
), target
, tmode
, modifier
);
8015 /* All valid uses of __builtin_va_arg_pack () are removed during
8017 if (CALL_EXPR_VA_ARG_PACK (exp
))
8018 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
8020 tree fndecl
= get_callee_fndecl (exp
), attr
;
8023 && (attr
= lookup_attribute ("error",
8024 DECL_ATTRIBUTES (fndecl
))) != NULL
)
8025 error ("%Kcall to %qs declared with attribute error: %s",
8026 exp
, lang_hooks
.decl_printable_name (fndecl
, 1),
8027 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
8029 && (attr
= lookup_attribute ("warning",
8030 DECL_ATTRIBUTES (fndecl
))) != NULL
)
8031 warning (0, "%Kcall to %qs declared with attribute warning: %s",
8032 exp
, lang_hooks
.decl_printable_name (fndecl
, 1),
8033 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
8035 /* Check for a built-in function. */
8036 if (fndecl
&& DECL_BUILT_IN (fndecl
))
8038 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_FRONTEND
)
8039 return lang_hooks
.expand_expr (exp
, original_target
,
8040 tmode
, modifier
, alt_rtl
);
8042 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
8045 return expand_call (exp
, target
, ignore
);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM,
			false);
	  else
	    {
	      gcc_assert (REG_P (target));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE
						    (TREE_OPERAND (exp, 0)))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			   type, 0, false);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
				       subreg_lowpart_offset (mode,
							      inner_mode));
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE
					      (TREE_OPERAND (exp, 0))));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (TYPE_MODE (type))
		  == GET_MODE_SIZE (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  op0 = gen_lowpart (TYPE_MODE (type), op0);
	}
      /* If both modes are integral, then we can convert from one to the
	 other.  */
      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
	       && SCALAR_INT_MODE_P (TYPE_MODE (type)))
	op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;
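
/* Illustration (not part of the GCC sources): VIEW_CONVERT_EXPR
   reinterprets a value in another type without changing its bits, as in
   this kind of type punning, which the middle end can represent with a
   VIEW_CONVERT_EXPR.  When the two modes have the same size the code
   above uses gen_lowpart; otherwise it spills to memory and reloads:

     static unsigned int
     bits_of_float (float f)
     {
       union { float f; unsigned int u; } pun;
       pun.f = f;
       return pun.u;    // same bits, viewed as a different type
     }
*/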
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
	 expand is able to handle this correctly and get the correct result out
	 of the PLUS_EXPR code.  */
    case PLUS_EXPR:

      /* Check if this is a case for multiplication and addition.  */
      if ((TREE_CODE (type) == INTEGER_TYPE
	   || TREE_CODE (type) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
	{
	  tree subsubexp0, subsubexp1;
	  enum tree_code code0, code1, this_code;

	  subexp0 = TREE_OPERAND (exp, 0);
	  subsubexp0 = TREE_OPERAND (subexp0, 0);
	  subsubexp1 = TREE_OPERAND (subexp0, 1);
	  code0 = TREE_CODE (subsubexp0);
	  code1 = TREE_CODE (subsubexp1);
	  this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
						       : FIXED_CONVERT_EXPR;
	  if (code0 == this_code && code1 == this_code
	      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
		  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
	      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
	      && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
		  == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
	    {
	      tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
	      enum machine_mode innermode = TYPE_MODE (op0type);
	      bool zextend_p = TYPE_UNSIGNED (op0type);
	      bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
	      if (sat_p == 0)
		this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
	      else
		this_optab = zextend_p ? usmadd_widen_optab
				       : ssmadd_widen_optab;
	      if (mode == GET_MODE_2XWIDER_MODE (innermode)
		  && (optab_handler (this_optab, mode)->insn_code
		      != CODE_FOR_nothing))
		{
		  expand_operands (TREE_OPERAND (subsubexp0, 0),
				   TREE_OPERAND (subsubexp1, 0),
				   NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
				     VOIDmode, EXPAND_NORMAL);
		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
					    target, unsignedp);
		  gcc_assert (temp);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}

      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
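
/* Illustration (not part of the GCC sources): the widening
   multiply-accumulate shape the MULT_EXPR check above looks for.
   Assuming 16-bit short and 32-bit int, a target that provides
   smadd_widen_optab (a 16x16+32 multiply-accumulate insn) can expand
   the whole expression as a single instruction:

     int
     mac (short a, short b, int acc)
     {
       return (int) a * (int) b + acc;   // (NOP a * NOP b) + acc
     }
*/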
    case MINUS_EXPR:
      /* Check if this is a case for multiplication and subtraction.  */
      if ((TREE_CODE (type) == INTEGER_TYPE
	   || TREE_CODE (type) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
	{
	  tree subsubexp0, subsubexp1;
	  enum tree_code code0, code1, this_code;

	  subexp1 = TREE_OPERAND (exp, 1);
	  subsubexp0 = TREE_OPERAND (subexp1, 0);
	  subsubexp1 = TREE_OPERAND (subexp1, 1);
	  code0 = TREE_CODE (subsubexp0);
	  code1 = TREE_CODE (subsubexp1);
	  this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
						       : FIXED_CONVERT_EXPR;
	  if (code0 == this_code && code1 == this_code
	      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
		  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
	      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
	      && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
		  == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
	    {
	      tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
	      enum machine_mode innermode = TYPE_MODE (op0type);
	      bool zextend_p = TYPE_UNSIGNED (op0type);
	      bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
	      if (sat_p == 0)
		this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
	      else
		this_optab = zextend_p ? usmsub_widen_optab
				       : ssmsub_widen_optab;
	      if (mode == GET_MODE_2XWIDER_MODE (innermode)
		  && (optab_handler (this_optab, mode)->insn_code
		      != CODE_FOR_nothing))
		{
		  expand_operands (TREE_OPERAND (subsubexp0, 0),
				   TREE_OPERAND (subsubexp1, 0),
				   NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
				     VOIDmode, EXPAND_NORMAL);
		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
					    target, unsignedp);
		  gcc_assert (temp);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}

      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */

      subexp0 = TREE_OPERAND (exp, 0);
      subexp1 = TREE_OPERAND (exp, 1);
      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (subexp0) == NOP_EXPR
	  && TREE_CODE (subexp1) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
	  this_optab = usmul_widen_optab;
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
		{
		  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
		    expand_operands (TREE_OPERAND (subexp0, 0),
				     TREE_OPERAND (subexp1, 0),
				     NULL_RTX, &op0, &op1, 0);
		  else
		    expand_operands (TREE_OPERAND (subexp0, 0),
				     TREE_OPERAND (subexp1, 0),
				     NULL_RTX, &op1, &op0, 0);

		  goto binop3;
		}
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE
				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_PRECISION (TREE_TYPE
				      (TREE_OPERAND
				       (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TYPE_UNSIGNED (TREE_TYPE
				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_UNSIGNED (TREE_TYPE
				     (TREE_OPERAND
				      (TREE_OPERAND (exp, 0), 0)))))))
	{
	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
	    {
	      if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
		{
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  goto binop3;
		}
	      else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (TREE_OPERAND (exp, 1)),
					 unsignedp);
		  else
		    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (hipart != htem)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
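
/* Illustration (not part of the GCC sources): the widening-multiply
   pattern matched above.  Both operands are narrower values extended
   with the same signedness, so on a target providing smul_widen_optab
   (or umul_widen_optab for unsigned) the 32x32->64 product needs no
   64-bit multiplier; the word_mode fallback instead computes the low
   part with an ordinary multiply and repairs the high part via
   expand_mult_highpart_adjust:

     long long
     widen_mul (int a, int b)
     {
       return (long long) a * (long long) b;
     }
*/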
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;
    case FIX_TRUNC_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;
    case FLOAT_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    /* ??? Same problem as in expmed.c: emit_conditional_move
	       forces a stack adjustment via compare_from_rtx, and we
	       lose the stack adjustment if the sequence we are about
	       to create is discarded.  */
	    do_pending_stack_adjust ();

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
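
/* Illustration (not part of the GCC sources): the comparison
   canonicalization performed above, written out for MAX against the
   constant 1.  Comparing against zero is cheaper on most targets:

     int
     max1 (int a)
     {
       return a >= 1 ? a : 1;   // expanded as: a > 0 ? a : 1
     }
*/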
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
		  || (GET_MODE_PRECISION (TYPE_MODE (type))
		      == TYPE_PRECISION (type)));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_shift" doesn't support sat/no-sat fixed-point
	 shifts.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
			 VOIDmode, EXPAND_NORMAL);
      temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      if (code == LSHIFT_EXPR)
	temp = REDUCE_BIT_FIELD (temp);
      return temp;
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, EXPAND_NORMAL);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */

      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target,
			 VOIDmode, EXPAND_NORMAL);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;
    case MODIFY_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	gcc_assert (ignore);
	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }

    case GIMPLE_MODIFY_STMT:
      {
	tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
	tree rhs = GIMPLE_STMT_OPERAND (exp, 1);

	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       MOVE_NONTEMPORAL (exp));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
	return const0_rtx;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;
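
/* Illustration (not part of the GCC sources): the one-bit-bitfield
   assignment rewritten by the GIMPLE_MODIFY_STMT special case above.
   Instead of a load/or/store sequence it emits a jump on the tested
   bit and an unconditional store of the constant:

     struct s { unsigned x : 1, y : 1; };

     void
     set_flag (struct s *a, struct s *b)
     {
       a->x |= b->y;    // becomes: if (b->y) a->x = 1;
     }
*/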
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      op1 = expand_normal (TREE_OPERAND (exp, 1));

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, true);
    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case CHANGE_DYNAMIC_TYPE_EXPR:
      /* This is ignored at the RTL level.  The tree level set
	 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
	 overkill for the RTL layer but is all that we can
	 represent.  */
      return const0_rtx;

    case EXC_PTR_EXPR:
      return get_exception_pointer ();

    case FILTER_EXPR:
      return get_exception_filter ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);
    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case DOT_PROD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (exp, op0, op1, op2,
					    target, unsignedp);
	return target;
      }
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
      {
	expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			 NULL_RTX, &op0, &op1, 0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			     OPTAB_WIDEN);
	gcc_assert (temp);
	return temp;
      }

    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      {
	expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			 NULL_RTX, &op0, &op1, 0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			     OPTAB_WIDEN);
	gcc_assert (temp);
	return temp;
      }
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (exp, target);
	return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	/* The signedness is determined from input operand.  */
	this_optab = optab_for_tree_code (code,
					  TREE_TYPE (TREE_OPERAND (exp, 0)),
					  optab_default);
	temp = expand_widen_pattern_expr
	  (exp, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
					    target, unsignedp);
	gcc_assert (target);
	return target;
      }

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      goto binop;
    case OMP_ATOMIC_LOAD:
    case OMP_ATOMIC_STORE:
      /* OMP expansion is not run when there were errors, so these codes
	 can only appear, and must be ignored, after errors.  */
      gcc_assert (errorcount != 0);
      return NULL_RTX;

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (GET_CODE (exp) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
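
/* Illustration (not part of the GCC sources): the same reductions on
   ordinary integers.  Unsigned values are masked; signed values are
   sign-extended with a shift pair, just as the two branches above do
   in RTL.  (In source-level C, left-shifting a negative value is
   formally undefined; the RTL shifts operate on the machine's terms.)

     // Reduce v to a 5-bit unsigned value.
     unsigned int
     reduce_u5 (unsigned int v)
     {
       return v & ((1u << 5) - 1);
     }

     // Reduce v to a 5-bit signed value, assuming 32-bit int.
     int
     reduce_s5 (int v)
     {
       return (v << (32 - 5)) >> (32 - 5);
     }
*/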
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
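
/* Illustration (not part of the GCC sources): an offset computation of
   the shape recognized above.  Rounding an address up to a 64-byte
   boundary yields a BIT_AND_EXPR of a NEGATE_EXPR of the object's
   address with the constant 63 (this only qualifies when 64 exceeds
   BIGGEST_ALIGNMENT / BITS_PER_UNIT on the target):

     #include <stdint.h>

     static char *
     align_up_64 (char *p)
     {
       return p + ((-(uintptr_t) p) & (64 - 1));
     }
*/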
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* If they are read-only, non-volatile and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
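
/* Illustration (not part of the GCC sources): how a caller typically
   uses string_constant, in the style of the builtin folders.  Given the
   tree for "hello" + 1 it returns the STRING_CST and sets the offset
   to 1:

     tree offset;
     tree str = string_constant (arg, &offset);
     if (str && host_integerp (offset, 1)
	 && compare_tree_int (offset, TREE_STRING_LENGTH (str)) < 0)
       {
	 // TREE_STRING_POINTER (str) + tree_low_cst (offset, 1)
	 // now points at the bytes being accessed.
       }
*/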
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];

  if (icode == CODE_FOR_nothing)
    {
      enum machine_mode wmode;

      for (wmode = operand_mode;
	   icode == CODE_FOR_nothing && wmode != VOIDmode;
	   wmode = GET_MODE_WIDER_MODE (wmode))
	icode = optab_handler (cstore_optab, wmode)->insn_code;
    }

  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((optab_handler (abs_optab, operand_mode)->insn_code
		    != CODE_FOR_nothing)
		   || (optab_handler (ffs_optab, operand_mode)->insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  label = gen_label_rtx ();
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
			   NULL_RTX, label);

  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
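
/* Illustration (not part of the GCC sources): the single-bit
   transformation delegated to fold_single_bit_test above.  No scc
   instruction is needed; a shift and a mask suffice:

     int
     bit3_set (unsigned int x)
     {
       return (x & 8) != 0;    // expanded as: (x >> 3) & 1
     }
*/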
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
	    rtx fallback_label ATTRIBUTE_UNUSED)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, !default_label
					   ? fallback_label : default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
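
/* Illustration (not part of the GCC sources): the single unsigned
   comparison used above to implement both bounds checks at once.  After
   the low bound has been subtracted, one GTU test rejects everything
   outside [lo, hi]:

     // Equivalent of: lo <= i && i <= hi, with a single comparison.
     int
     in_range (int i, int lo, int hi)
     {
       return (unsigned int) (i - lo) <= (unsigned int) (hi - lo);
     }
*/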
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT
      && class != MODE_VECTOR_FRACT
      && class != MODE_VECTOR_UFRACT
      && class != MODE_VECTOR_ACCUM
      && class != MODE_VECTOR_UACCUM)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"