/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
   Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode,
					     copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
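
/* Illustrative sketch (editor's addition, not part of the original file):
   a typical caller widens a value with gen_reg_rtx plus convert_move, or
   equivalently with convert_to_mode.  "val" is a hypothetical SImode rtx;
   the function name below is an example only.  */
#if 0
static rtx
example_widen_si_to_di (rtx val)
{
  /* Allocate a DImode pseudo and sign-extend VAL into it.  convert_move
     tries a direct extend insn, then extending via word_mode, then a
     multiword copy by hand, in that order.  */
  rtx wide = gen_reg_rtx (DImode);
  convert_move (wide, val, /*unsignedp=*/0);
  return wide;
}
#endif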
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode,
	       rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
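
/* Worked example (editor's addition) of the CONST_INT extension above:
   converting the QImode constant 0xFF to HImode when OLDMODE is known.
   width = 8, so val &= (1 << 8) - 1 leaves 0xFF.  If !unsignedp, bit 7
   is set, so val |= -1 << 8 gives -1, i.e. sign-extension to 0xFFFF in
   HImode; with unsignedp the value stays 0x00FF.  */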
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
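
/* Illustrative sketch (editor's addition): how this predicate is meant
   to be consulted before expanding a copy inline.  The MEMs and the
   constants are hypothetical.  */
#if 0
  if (can_move_by_pieces (16, 32 /* bit alignment */))
    /* Cheap enough: expand to at most MOVE_RATIO scalar moves.  */
    move_by_pieces (dest_mem, src_mem, 16, 32, 0);
  else
    /* Otherwise let emit_block_move pick a movmem insn, a memcpy
       libcall, or an explicit loop.  */
    emit_block_move (dest_mem, src_mem, GEN_INT (16), BLOCK_OP_NORMAL);
#endif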
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
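
/* Illustrative sketch (editor's addition): the ENDP argument mirrors
   memcpy/mempcpy/stpcpy.  "d" and "s" are hypothetical BLKmode MEMs
   already known to satisfy MOVE_BY_PIECES_P.  */
#if 0
  rtx end;
  /* Plain copy; the return value is "d" itself.  */
  move_by_pieces (d, s, len, align, 0);
  /* mempcpy-style: return a QImode MEM just past the last byte.  */
  end = move_by_pieces (d, s, len, align, 1);
  /* stpcpy-style: return a QImode MEM at the last byte written.  */
  end = move_by_pieces (d, s, len, align, 2);
#endif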
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
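
/* Worked example (editor's addition): on a target whose widest piece is
   8 bytes, with DImode, SImode, HImode and QImode all usable at full
   alignment, a 13-byte move costs one DImode, one SImode and one QImode
   move: 13 = 8 + 4 + 1, so move_by_pieces_ninsns returns 3.  With
   MOVE_RATIO greater than 3, MOVE_BY_PIECES_P accepts such a copy.  */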
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
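
/* Illustrative sketch (editor's addition): expanding a structure
   assignment through emit_block_move.  "dst" and "src" are hypothetical
   BLKmode MEMs.  */
#if 0
  /* Copy 64 bytes; the helper picks move_by_pieces, a movmem insn,
     a memcpy libcall, or an explicit loop, in that order.  */
  emit_block_move (dst, src, GEN_INT (64), BLOCK_OP_NORMAL);

  /* Same copy, but issued while outgoing call arguments are being
     pushed, so a libcall may clobber them; the loop fallback is used
     when the libcall is unsafe.  */
  emit_block_move (dst, src, GEN_INT (64), BLOCK_OP_CALL_PARM);
#endif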
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
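
/* Control-flow sketch (editor's addition) of the loop emitted above,
   in pseudo-RTL:

       iter = 0;
       goto cmp;
     top:
       *(x + iter) = *(y + iter);   -- one QImode move
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;

   One byte per iteration, hence the ??? note about larger hunks.  */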
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
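
/* Illustrative sketch (editor's addition): the PARALLEL shape these
   group routines consume and produce.  A two-register group such as

       (parallel [(expr_list (reg:DI 3) (const_int 0))
		  (expr_list (reg:DI 4) (const_int 8))])

   is cloned into the same layout with fresh pseudos in place of regs
   3 and 4; the byte offsets are carried over unchanged.  */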
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
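
/* Illustrative sketch (editor's addition): a common pairing of the
   group routines.  "regs" is a hypothetical PARALLEL return-value
   group.  */
#if 0
  /* Clone the group's shape with fresh pseudos, then move into them,
     e.g. to keep a value live across code that may clobber the hard
     registers.  */
  rtx pseudos = gen_group_rtx (regs);
  emit_group_move (pseudos, regs);
#endif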
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]),
					      bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
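/* Editor's note: a small worked instance of the PADDING_CORRECTION logic
   above, assuming a 6-byte struct, 4-byte words, and a big-endian target
   that does not return in the MSB end.  This is an illustrative sketch by
   the editor, not part of GCC.  */
#if 0
static void
copy_blkmode_padding_example (void)
{
  unsigned HOST_WIDE_INT bytes = 6;   /* as from int_size_in_bytes (type) */

  /* With UNITS_PER_WORD == 4 and BITS_PER_WORD == 32, the partial word
     carries only 2 significant bytes, so the copy loop must start reading
     the source 32 - (6 % 4) * 8 == 16 bits in, skipping the left padding.  */
  unsigned HOST_WIDE_INT padding_correction
    = BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT);
  gcc_assert (padding_correction == 16);
}
#endif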
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
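/* Editor's sketch (not part of GCC): the shape of a CONSTFUN callback as
   consumed by can_store_by_pieces and store_by_pieces.  For any OFFSET and
   MODE the by-pieces machinery chooses, it must return the constant to
   store at that offset.  This hypothetical callback replicates a single
   fill byte, memset-style; all names here are the editor's inventions.  */
#if 0
static rtx
example_constfun (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                  enum machine_mode mode)
{
  /* DATA carries the fill byte; build a MODE-sized constant from it.  */
  unsigned char byte = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | byte;
  return gen_int_mode (val, mode);
}
#endif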
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
   mempcpy; and if ENDP is 2 return memory at the end minus one byte,
   a la stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
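/* Editor's sketch (not part of GCC): using the pair above with the
   hypothetical example_constfun callback sketched earlier.  Callers are
   expected to test can_store_by_pieces first, since store_by_pieces
   asserts STORE_BY_PIECES_P.  */
#if 0
static rtx
store_by_pieces_example (rtx dest, unsigned HOST_WIDE_INT len,
                         unsigned int align)
{
  static unsigned char fill = 0xab;   /* hypothetical fill byte */

  if (can_store_by_pieces (len, example_constfun, &fill, align))
    /* ENDP == 1: return memory just past the stored block, mempcpy-style.  */
    return store_by_pieces (dest, len, example_constfun, &fill, align, 1);
  return NULL_RTX;
}
#endif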
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (GET_CODE (size) == CONST_INT
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align))
    ;
  else
    return clear_storage_via_libcall (object, size,
                                      method == BLOCK_OP_TAILCALL);

  return NULL;
}
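/* Editor's sketch (not part of GCC): zeroing a 32-byte BLKmode temporary
   through clear_storage.  Depending on size, alignment, and whether the
   target defines a setmem pattern, the call resolves to clear_by_pieces,
   a setmem insn, or the memset libcall implemented below.  */
#if 0
static void
clear_storage_example (void)
{
  rtx slot = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (slot, GEN_INT (32), BLOCK_OP_NORMAL);
}
#endif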
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     wrap those pseudos in trees and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  size_tree = make_tree (sizetype, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = setmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx opsize, opchar;
          enum machine_mode char_mode;
          rtx last = get_last_insn ();
          rtx pat;

          opsize = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (opsize, mode))
            opsize = copy_to_mode_reg (mode, opsize);

          opchar = val;
          char_mode = insn_data[(int) code].operand[2].mode;
          if (char_mode != VOIDmode)
            {
              opchar = convert_to_mode (char_mode, opchar, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (opchar, char_mode))
                opchar = copy_to_mode_reg (char_mode, opchar);
            }

          pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
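/* Editor's note, for reference when reading the predicate checks above:
   the operands of a setmemM pattern as consumed here are operand 0, the
   destination BLKmode MEM; operand 1, the byte count; operand 2, the fill
   value; and operand 3, the alignment in units.  A hedged sketch of a
   direct call follows; the names are the editor's.  */
#if 0
static bool
setmem_example (rtx blk_mem, rtx nbytes)
{
  /* Fill BLK_MEM with 0xff bytes if the target provides a pattern.  */
  return set_storage_via_setmem (blk_mem, nbytes, GEN_INT (0xff),
                                 MEM_ALIGN (blk_mem));
}
#endif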
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode);
}
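/* Editor's sketch (not part of GCC): write_complex_part and
   read_complex_part are the primitive pair used below to move complex
   values piecewise.  For instance, swapping the parts of Y into X is
   just two reads crossed with two writes.  */
#if 0
static void
swap_complex_parts_example (rtx x, rtx y)
{
  write_complex_part (x, read_complex_part (y, true), false);
  write_complex_part (x, read_complex_part (y, false), true);
}
#endif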
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
                       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = mov_optab->handlers[imode].insn_code;
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              GEN_INT (adjust), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
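/* Editor's note: an illustration of what emit_move_resolve_push computes.
   For a 4-byte mode with a (pre_dec sp) push operand, the returned MEM is
   equivalent to an explicit stack-pointer adjustment followed by a plain
   store:

     (set (mem (pre_dec sp)) y)
   becomes
     sp = sp - 4;  (set (mem sp) y)

   while the POST variants instead address the slot just vacated by the
   adjustment (sp - adjust after the update).  */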
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);
  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = mov_optab->handlers[CCmode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = mov_optab->handlers[mode].insn_code;
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, false);
      if (ret)
        return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (MEM_P (y)
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
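/* Editor's sketch (not part of GCC): a typical use of emit_move_insn.
   Any non-BLKmode value can be copied this way; constants may be spilled
   to the constant pool, or compressed to a narrower float mode as handled
   above.  */
#if 0
static void
emit_move_example (void)
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));   /* VOIDmode constant into SImode.  */
}
#endif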
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (LEGITIMATE_CONSTANT_P (y))
    oldcost = rtx_cost (y, SET);
  else
    oldcost = rtx_cost (force_const_mem (dstmode, y), SET);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
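/* Editor's note: a concrete instance of the transformation above.  On a
   target with an extendsfdf2 pattern, storing the DFmode constant 1.0 can
   become an SFmode constant-pool load plus extension when that is cheaper:

     (set (reg:DF d) (const_double:DF 1.0))
   becomes
     (set (reg:DF d) (float_extend:DF (mem:SF <pool ref to 1.0f>)))

   since 1.0 truncates to SFmode exactly (exact_real_truncate succeeds).  */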
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    GEN_INT (rounded_size),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
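/* Editor's note: a worked instance of the downward-padding path above.
   Assume a downward-growing stack, downward padding, and PUSH_ROUNDING
   rounding a 1-byte QImode push up to 4 bytes: rounded_size = 4 and
   padding_size = 3, so after sp -= 4 the value is stored at sp + 3, or at
   sp + 3 + 4 when STACK_PUSH_CODE is POST_DEC, since in that case the
   stack pointer was already decremented past the slot.  */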
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, int extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (dest, align);

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        {
          gcc_assert (partial % UNITS_PER_WORD == 0);
          move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
        }
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
          || x == 0
           /* Only registers can be subtargets.  */
           || !REG_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
          ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
                                 unsigned HOST_WIDE_INT bitpos,
                                 enum machine_mode mode1, rtx str_rtx,
                                 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (!BINARY_CLASS_P (src)
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);
  STRIP_NOPS (op0);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
        str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
                                MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
        return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (TREE_CODE (src))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
         where we don't need to do any masking and also
         1 bit bitfields where xor can be used.
         We might win by one instruction for the other bitfields
         too if insv/extv instructions aren't used, so that
         can be added later.  */
      if (bitpos + bitsize != str_bitsize
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
        break;

      value = expand_expr (op1, NULL_RTX, str_mode, 0);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
        {
          value = expand_and (str_mode, value, const1_rtx, NULL);
          binop = xor_optab;
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
      value = convert_modes (GET_MODE (str_rtx),
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
        {
          rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
                              - 1);
          value = expand_and (GET_MODE (str_rtx), value, mask,
                              NULL_RTX);
        }
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
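/* Editor's note: the kind of source construct the optimization above
   targets.  For C code such as

     struct S { unsigned a : 1; } *p;
     p->a ^= 1;

   the store can be done as a single XOR (or OR/ADD in the other cases) of
   a shifted, masked constant into the word containing the bitfield,
   bypassing the general extract/insert path taken by store_field.  */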
/* Expand an assignment that stores the value of FROM into TO.  */

void
expand_assignment (tree to, tree from)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_normal (from);
      return;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
  if (handled_component_p (to)
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep, true);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      to_rtx = expand_normal (tem);

      if (offset != 0)
        {
          rtx offset_rtx;

          if (!MEM_P (to_rtx))
            {
              /* We can get constant negative offsets into arrays with broken
                 user code.  Translate this to a trap instead of ICEing.  */
              gcc_assert (TREE_CODE (offset) == INTEGER_CST);
              expand_builtin_trap ();
              to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
            }

          offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (MEM_P (to_rtx)
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_target (to,
                                                                   offset));
        }

      /* Handle expand_expr of a complex value returning a CONCAT.  */
      if (GET_CODE (to_rtx) == CONCAT)
        {
          if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
            {
              gcc_assert (bitpos == 0);
              result = store_expr (from, to_rtx, false);
            }
          else
            {
              gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
              result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
            }
        }
      else
        {
          if (MEM_P (to_rtx))
            {
              /* If the field is at offset zero, we could have been given the
                 DECL_RTX of the parent struct.  Don't munge it.  */
              to_rtx = shallow_copy_rtx (to_rtx);

              set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);

              /* Deal with volatile and readonly fields.  The former is only
                 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
              if (volatilep)
                MEM_VOLATILE_P (to_rtx) = 1;
              if (component_uses_parent_alias_set (to))
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
            }

          if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
                                               to_rtx, to, from))
            result = NULL;
          else
            result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                                  TREE_TYPE (tem), get_alias_set (to));
        }

      if (result)
        preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && REG_P (DECL_RTL (to))))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
          if (POINTER_TYPE_P (TREE_TYPE (to)))
            value = convert_memory_address (GET_MODE (to_rtx), value);
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TYPE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem.
   This is because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */

rtx
store_expr (tree exp, rtx target, int call_param_p)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  int dont_return_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && (!lang_hooks.reduce_bit_field_operations
	      || (GET_MODE_PRECISION (GET_MODE (target))
		  == TYPE_PRECISION (TREE_TYPE (exp)))))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      (lang_hooks.types.signed_or_unsigned_type
	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

	  exp = convert (lang_hooks.types.type_for_mode
			 (GET_MODE (SUBREG_REG (target)),
			  SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && REG_P (target)
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && CONSTANT_P (temp))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, TREE_TYPE (exp),
			 int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
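/* Illustrative sketch (not part of the GCC sources): the string-constant
   branch above behaves like the following stand-alone helper, for a
   hypothetical destination DST of DST_SIZE bytes and a string constant of
   STR_LEN bytes.  */
#if 0
#include <string.h>

static void
copy_string_into_array (char *dst, size_t dst_size,
			const char *str, size_t str_len)
{
  /* Copy only as much of the string as fits in the array.  */
  size_t copy = str_len < dst_size ? str_len : dst_size;
  memcpy (dst, str, copy);

  /* Clear whatever is left of the array.  */
  if (copy < dst_size)
    memset (dst + copy, 0, dst_size - copy);
}
#endif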
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields are set to non-constant values,
     and place it in *P_NC_ELTS; and
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * if a type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *p_must_clear.  */

static void
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_nc_elts,
			    HOST_WIDE_INT *p_elt_count,
			    bool *p_must_clear)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, nc_elts, elt_count;
  tree value, purpose;

  nz_elts = 0;
  nc_elts = 0;
  elt_count = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
	    mult = (tree_low_cst (hi_index, 1)
		    - tree_low_cst (lo_index, 1) + 1);
	}

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
	    categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
	    nz_elts += mult * nz;
	    nc_elts += mult * nc;
	    elt_count += mult * ic;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  elt_count += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case VECTOR_CST:
	  {
	    tree v;
	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
	      {
		if (!initializer_zerop (TREE_VALUE (v)))
		  nz_elts += mult;
		elt_count += mult;
	      }
	  }
	  break;

	default:
	  nz_elts += mult;
	  elt_count += mult;
	  if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
	    nc_elts += mult;
	  break;
	}
    }

  if (!*p_must_clear
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
    {
      tree init_sub_type;
      bool clear_this = true;

      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
	{
	  /* We don't expect more than one element of the union to be
	     initialized.  Not sure what we should do otherwise... */
	  gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
		      == 1);

	  init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
						CONSTRUCTOR_ELTS (ctor),
						0)->value);

	  /* ??? We could look at each element of the union, and find the
	     largest element.  Which would avoid comparing the size of the
	     initialized element against any tail padding in the union.
	     Doesn't seem worth the effort...  */
	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
				TYPE_SIZE (init_sub_type)) == 1)
	    {
	      /* And now we have to find out if the element itself is fully
		 constructed.  E.g. for union { struct { int a, b; } s; } u
		 = { .s = { .a = 1 } }.  */
	      if (elt_count == count_type_elements (init_sub_type, false))
		clear_this = false;
	    }
	}

      *p_must_clear = clear_this;
    }

  *p_nz_elts += nz_elts;
  *p_nc_elts += nc_elts;
  *p_elt_count += elt_count;
}
void
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_nc_elts,
			  HOST_WIDE_INT *p_elt_count,
			  bool *p_must_clear)
{
  *p_nz_elts = 0;
  *p_nc_elts = 0;
  *p_elt_count = 0;
  *p_must_clear = false;
  categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
			      p_must_clear);
}
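/* Illustrative sketch (not part of the GCC sources): for a flat array
   initializer the categorization above reduces to a simple scan.  For
   {1, 0, 0, 2} this helper yields *nz_elts == 2 and *elt_count == 4.  */
#if 0
static void
categorize_array (const int *init, int n, int *nz_elts, int *elt_count)
{
  int i;

  *nz_elts = 0;
  *elt_count = n;
  for (i = 0; i < n; i++)
    if (init[i] != 0)
      ++*nz_elts;
}
#endif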
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
   array member at the end of the structure.  */

HOST_WIDE_INT
count_type_elements (tree type, bool allow_flexarr)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree telts = array_type_nelts (type);
	if (telts && host_integerp (telts, 1))
	  {
	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
	    if (n == 0)
	      return 0;
	    else if (max / n > m)
	      return n * m;
	  }
	return -1;
      }

    case RECORD_TYPE:
      {
	HOST_WIDE_INT n = 0, t;
	tree f;

	for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      t = count_type_elements (TREE_TYPE (f), false);
	      if (t < 0)
		{
		  /* Check for structures with flexible array member.  */
		  tree tf = TREE_TYPE (f);
		  if (allow_flexarr
		      && TREE_CHAIN (f) == NULL
		      && TREE_CODE (tf) == ARRAY_TYPE
		      && TYPE_DOMAIN (tf)
		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
		      && int_size_in_bytes (type) >= 0)
		    break;

		  return -1;
		}
	      n += t;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	/* Ho hum.  How in the world do we guess here?  Clearly it isn't
	   right to count the fields.  Guess based on the number of words.  */
	HOST_WIDE_INT n = int_size_in_bytes (type);
	if (n < 0)
	  return -1;
	return n / UNITS_PER_WORD;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
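/* Illustrative sketch (not part of the GCC sources): the ARRAY_TYPE case
   above guards the multiplication N * M against overflow by testing
   MAX / N > M first, as in this stand-alone helper (assumes n, m >= 0).  */
#if 0
#include <limits.h>

static long
checked_count (long n, long m)
{
  if (n == 0)
    return 0;

  /* Refuse to multiply when n * m would exceed LONG_MAX.  */
  return (LONG_MAX / n > m) ? n * m : -1;
}
#endif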
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
      if (must_clear)
	return 1;

      elts = count_type_elements (TREE_TYPE (exp), false);

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}

/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, count;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
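/* Illustrative sketch (not part of the GCC sources): the zero-density
   tests here and in store_constructor are integer-only.  "At least 3/4
   zeros" is written as 4 * zero_count >= 3 * count so that no division
   is needed.  */
#if 0
static int
mostly_zeros (int zero_count, int count)
{
  /* Equivalent to zero_count / count >= 0.75 without rounding loss.  */
  return 4 * zero_count >= 3 * count;
}
#endif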
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
		      != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (! cleared)
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (host_integerp (DECL_SIZE (field), 1))
	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
	    else
	      bitsize = -1;

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (host_integerp (offset, 0)
		&& host_integerp (bit_position (field), 0))
	      {
		bitpos = int_bit_position (field);
		offset = 0;
	      }
	    else
	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	    if (offset)
	      {
		rtx offset_rtx;

		offset
		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						    make_tree (TREE_TYPE (exp),
							       target));
		offset_rtx = expand_normal (offset);
		gcc_assert (MEM_P (to_rtx));

#ifdef POINTERS_EXTEND_UNSIGNED
		if (GET_MODE (offset_rtx) != Pmode)
		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
		if (GET_MODE (offset_rtx) != ptr_mode)
		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

		to_rtx = offset_address (to_rtx, offset_rtx,
					 highest_pow2_factor (offset));
	      }

#ifdef WORD_REGISTER_OPERATIONS
	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_size
		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
		    value = convert (type, value);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (NULL_TREE,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }
#endif

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
				     value, type, cleared,
				     get_alias_set (TREE_TYPE (field)));
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
			  && host_integerp (TYPE_MAX_VALUE (domain), 0));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! host_integerp (lo_index, 1)
			|| ! host_integerp (hi_index, 1))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_low_cst (hi_index, 1)
				       - tree_low_cst (lo_index, 1) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    int unsignedp;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    unsignedp = TYPE_UNSIGNED (elttype);
	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && host_integerp (lo_index, 0)
		    && host_integerp (hi_index, 0)
		    && (lo = tree_low_cst (lo_index, 0),
			hi = tree_low_cst (hi_index, 0),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (host_integerp (TYPE_SIZE (elttype), 1)
			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, type, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx loop_start = gen_label_rtx ();
		    rtx loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);
		    unsignedp = TYPE_UNSIGNED (domain);

		    index = build_decl (VAR_DECL, NULL_TREE, domain);

		    index_r
		      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						   &unsignedp, 0));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position
		      = convert (ssizetype,
				 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
					      index, TYPE_MIN_VALUE (domain)));
		    position = size_binop (MULT_EXPR, position,
					   convert (ssizetype,
						    TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node));

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! host_integerp (index, 0))
		     || ! host_integerp (TYPE_SIZE (elttype), 1))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position = size_binop (MULT_EXPR, index,
				       convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_low_cst (index, 0) - minelt)
			    * tree_low_cst (TYPE_SIZE (elttype), 1));
		else
		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 type, cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = 0;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) vec_init_optab->handlers[mode].insn_code;
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_low_cst
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype), 0), 1);

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_low_cst (ce->index, 1);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* Vector CONSTRUCTORs should only be built from smaller
		   vectors in the case of BLKmode vectors.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos,
					 value_mode, value, type,
					 cleared, get_alias_set (elttype));
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
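/* Illustrative sketch (not part of the GCC sources): when most elements
   are zero, clearing the whole object once and then storing only the
   nonzero elements beats storing every element individually, which is
   the strategy store_constructor implements above.  */
#if 0
#include <string.h>

static void
init_mostly_zero (int *a, int n)
{
  memset (a, 0, n * sizeof *a);	/* one cheap block clear */
  a[2] = 5;			/* then just the nonzero stores */
}
#endif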
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     enum machine_mode mode, tree exp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      if (TREE_CODE (exp) == NOP_EXPR)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = TREE_OPERAND (exp, 0);
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && !(bitpos % BITS_PER_UNIT));

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
				     DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   bitsize_int (*pbitsize));
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
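/* Illustrative sketch (not part of the GCC sources): when the byte offset
   turns out to be constant, it and the residual bit offset collapse into
   a single bit position, as in this hypothetical helper.  */
#if 0
static long
combine_offsets (long byte_offset, long bit_offset)
{
  /* bitpos = bytes * 8 + residual bits; e.g. byte 5, bit 3 -> bit 43.  */
  return byte_offset * 8 + bit_offset;
}
#endif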
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert (sizetype, aligned_size);
      return size_binop (MULT_EXPR, aligned_size,
			 size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert (sizetype, aligned_offset);
      return size_binop (MULT_EXPR, aligned_offset,
			 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return 1;

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value = simplify_gen_subreg (GET_MODE (value),
				   force_reg (GET_MODE (SUBREG_REG (value)),
					      force_operand (SUBREG_REG (value),
							     NULL_RTX)),
				   GET_MODE (SUBREG_REG (value)),
				   SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
5936 /* Subroutine of expand_expr: return nonzero iff there is no way that
5937 EXP can reference X, which is being modified. TOP_P is nonzero if this
5938 call is going to be used to determine whether we need a temporary
5939 for EXP, as opposed to a recursive call to this function.
5941 It is always safe for this routine to return zero since it merely
5942 searches for optimization opportunities. */
5945 safe_from_p (rtx x
, tree exp
, int top_p
)
5951 /* If EXP has varying size, we MUST use a target since we currently
5952 have no way of allocating temporaries of variable size
5953 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5954 So we assume here that something at a higher level has prevented a
5955 clash. This is somewhat bogus, but the best we can do. Only
5956 do this when X is BLKmode and when we are at the top level. */
5957 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5958 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5959 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5960 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5961 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5963 && GET_MODE (x
) == BLKmode
)
5964 /* If X is in the outgoing argument area, it is always safe. */
5966 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5967 || (GET_CODE (XEXP (x
, 0)) == PLUS
5968 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5971 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5972 find the underlying pseudo. */
5973 if (GET_CODE (x
) == SUBREG
)
5976 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5980 /* Now look at our tree code and possibly recurse. */
5981 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5983 case tcc_declaration
:
5984 exp_rtl
= DECL_RTL_IF_SET (exp
);
5990 case tcc_exceptional
:
5991 if (TREE_CODE (exp
) == TREE_LIST
)
5995 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5997 exp
= TREE_CHAIN (exp
);
6000 if (TREE_CODE (exp
) != TREE_LIST
)
6001 return safe_from_p (x
, exp
, 0);
6004 else if (TREE_CODE (exp
) == ERROR_MARK
)
6005 return 1; /* An already-visited SAVE_EXPR? */
6010 /* The only case we look at here is the DECL_INITIAL inside a
6012 return (TREE_CODE (exp
) != DECL_EXPR
6013 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
6014 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
6015 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
6018 case tcc_comparison
:
6019 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
6024 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);

    case tcc_expression:
    case tcc_reference:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL and that address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_CODE_LENGTH (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !lang_hooks.safe_from_p (x, exp))
	return 0;
      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
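
/* For example, expand_operands below calls safe_from_p (target, exp1, 1)
   to decide whether TARGET can be used to hold the result of EXP0 while
   EXP1 has not yet been expanded.  */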

/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
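
      /* E.g., for the constant 48 (binary 110000), c0 & -c0 isolates the
	 lowest set bit and yields 16, the largest power of two that
	 divides 48.  */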

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
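
/* For example, for the expression i * 12 + 8 this computes
   MIN (highest_pow2_factor (i) * 4, 8); with nothing known about i that
   is MIN (1 * 4, 8) = 4, so the sum is known to be a multiple of 4.  */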

/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
  else
    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
  return MAX (factor, target_align);
}

/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
	/* Should be ignored.  */;
      else if (lang_hooks.expand_decl (var))
	/* OK.  */;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
	expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
	rest_of_decl_compilation (var, 0, 0);
      else
	/* No expansion needed.  */
	gcc_assert (TREE_CODE (var) == TYPE_DECL
		    || TREE_CODE (var) == CONST_DECL
		    || TREE_CODE (var) == FUNCTION_DECL
		    || TREE_CODE (var) == LABEL_DECL);
    }
}

/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
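
/* A typical caller is a binary-operation case in expand_expr_real_1,
   for instance (sketch):

     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		      subtarget, &op0, &op1, 0);
     return expand_binop (mode, this_optab, op0, op1, target,
			  unsignedp, OPTAB_LIB_WIDEN);  */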

/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      || CONSTANT_CLASS_P (exp))
    return XEXP (output_constant_def (exp, 0), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);

    case CONST_DECL:
      /* Recurse and make the output_constant_def clause above handle this.  */
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
				      tmode, modifier);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Assume language
	 specific tree nodes can be expanded in some interesting way.  */
      if (DECL_P (exp)
	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */
	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp) && ! TREE_USED (exp))
	    {
	      assemble_external (exp);
	      TREE_USED (exp) = 1;
	    }

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);

      result = convert_memory_address (tmode, result);
      tmp = convert_memory_address (tmode, tmp);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = gen_rtx_PLUS (tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
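
/* For instance, for the address of a field sitting at byte offset 4 in
   its containing object, the recursive call returns the address of the
   object and the code above adds bitpos / BITS_PER_UNIT == 4 to it with
   plus_constant.  */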

/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address (tmode, result);

  return result;
}

/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural
   mode.  TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend
   insns.  This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
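
/* For example, expanding the address computation p + 4 under EXPAND_SUM
   may return the bare rtx (plus (reg) (const_int 4)) so that the caller
   can fold it into an addressing mode, instead of emitting an add insn.  */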

static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						REG_NOTES (insn));
	}
    }

  return ret;
}

static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context, subexp0, subexp1;
  bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))
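
/* For instance, arithmetic on a type with TYPE_PRECISION 3 carried out
   in an SImode register is wrapped in REDUCE_BIT_FIELD so the result is
   truncated back to 3 bits before it is used.  */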

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  if (lang_hooks.reduce_bit_field_operations
      && TREE_CODE (type) == INTEGER_TYPE
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
    {
      /* An operation in what may be a bit-field type needs the
	 result to be reduced to the precision of the bit-field type,
	 which is narrower than that of the type's mode.  */
      reduce_bit_field = true;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
    }

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == COND_EXPR
		 || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == COMPONENT_REF || code == INDIRECT_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }

    case SSA_NAME:
      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
				 NULL);

    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      gcc_assert (DECL_RTL (exp));

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (!context
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */
      if (MEM_P (DECL_RTL (exp))
	  && REG_P (XEXP (DECL_RTL (exp), 0)))
	temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */
      else if (MEM_P (DECL_RTL (exp))
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && !REG_P (XEXP (DECL_RTL (exp), 0)))))
	{
	  if (alt_rtl)
	    *alt_rtl = DECL_RTL (exp);
	  temp = replace_equiv_address (DECL_RTL (exp),
					copy_rtx (XEXP (DECL_RTL (exp), 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */
      if (REG_P (DECL_RTL (exp))
	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
	{
	  enum machine_mode pmode;

	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
				(TREE_CODE (exp) == RESULT_DECL
				 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
	  gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return DECL_RTL (exp);

    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);
      return temp;

    case VECTOR_CST:
      if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
	  || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
	return const_vector_from_tree (exp);
      else
	return expand_expr (build_constructor_from_list
			    (TREE_TYPE (exp),
			     TREE_VECTOR_CST_ELTS (exp)),
			    ignore ? const0_rtx : target, tmode, modifier);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = output_constant_def (exp, 1);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (temp, 0))
	      || flag_force_addr))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;

    case SAVE_EXPR:
      {
	tree val = TREE_OPERAND (exp, 0);
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    TREE_OPERAND (exp, 0) = val;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* Try to avoid creating a temporary at all.  This is possible
	 if all of the initializer is zero.
	 FIXME: try to handle all [0..255] initializers we can handle
	 with memset.  */
      else if (TREE_STATIC (exp)
	       && !TREE_ADDRESSABLE (exp)
	       && target != 0 && mode == BLKmode
	       && all_zeros_p (exp))
	{
	  clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	  return target;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || ((modifier == EXPAND_INITIALIZER
		    || modifier == EXPAND_CONST_ADDRESS)
		   && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}

    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);

	if (modifier != EXPAND_WRITE)
	  {
	    tree t;

	    t = fold_read_from_constant_string (exp);
	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (code == ALIGN_INDIRECT_REF)
	  {
	    int align = TYPE_ALIGN_UNIT (type);
	    op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
	    op0 = memory_address (mode, op0);
	  }

	temp = gen_rtx_MEM (mode, op0);

	set_mem_attributes (temp, exp, 0);

	/* Resolve the misalignment now, so that we don't have to remember
	   to resolve it later.  Of course, this only works for reads.  */
	/* ??? When we get around to supporting writes, we'll have to handle
	   this in store_expr directly.  The vectorizer isn't generating
	   those yet, however.  */
	if (code == MISALIGNED_INDIRECT_REF)
	  {
	    int icode;
	    rtx reg, insn;

	    gcc_assert (modifier == EXPAND_NORMAL
			|| modifier == EXPAND_STACK_PARM);

	    /* The vectorizer should have already checked the mode.  */
	    icode = movmisalign_optab->handlers[mode].insn_code;
	    gcc_assert (icode != CODE_FOR_nothing);

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail.  */
	    reg = gen_reg_rtx (mode);

	    /* Nor can the insn generator.  */
	    insn = GEN_FCN (icode) (reg, temp);
	    emit_insn (insn);

	    return reg;
	  }

	return temp;
      }
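
      /* A target that provides a movmisalign<mode> pattern (used by the
	 vectorizer for unaligned vector loads) reaches the branch above:
	 the emitted insn loads the unaligned MEM into a fresh pseudo
	 without going through a bit-field extraction.  */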

    case TARGET_MEM_REF:
      {
	struct mem_address addr;

	get_address_description (exp, &addr);
	op0 = addr_for_mem_ref (&addr, true);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
	return temp;
      }

    case ARRAY_REF:

      {
	tree array = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	  {
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
		  break;
		}
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
		 && targetm.binds_local_p (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    unsigned HOST_WIDE_INT ix;
		    tree field, value;

		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					      field, value)
		      if (tree_int_cst_equal (field, index))
			{
			  if (!TREE_SIDE_EFFECTS (value))
			    return expand_expr (fold (value), target, tmode,
						modifier);
			  break;
			}
		  }
		else if (TREE_CODE (init) == STRING_CST)
		  {
		    tree index1 = index;
		    tree low_bound = array_ref_low_bound (exp);
		    index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));

		    /* Optimize the special case of a zero lower bound.

		       We convert the low_bound to sizetype to avoid some
		       problems with constant folding.  (E.g. suppose the
		       lower bound is 1 and its mode is QI.  Without the
		       conversion, (ARRAY + (INDEX - (unsigned char) 1))
		       becomes ((ARRAY + (-(unsigned char) 1)) + INDEX),
		       which becomes (ARRAY + 255 + INDEX).  Oops!)  */
		    if (! integer_zerop (low_bound))
		      index1 = size_diffop (index1, fold_convert (sizetype,
								  low_bound));

		    if (0 > compare_tree_int (index1,
					      TREE_STRING_LENGTH (init)))
		      {
			tree type = TREE_TYPE (TREE_TYPE (init));
			enum machine_mode mode = TYPE_MODE (type);

			if (GET_MODE_CLASS (mode) == MODE_INT
			    && GET_MODE_SIZE (mode) == 1)
			  return gen_int_mode (TREE_STRING_POINTER (init)
					       [TREE_INT_CST_LOW (index1)],
					       mode);
		      }
		  }
	      }
	  }
      }
      goto normal_inner_ref;

    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
				    idx, field, value)
	    if (field == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (field))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (field));
		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_cst (NULL_TREE,
					   GET_MODE_BITSIZE (imode) - bitsize);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;

    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep, true);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a legitimate
	   constant, OFFSET is 0, and we won't try to extract outside the
	   register (in case we were passed a partially uninitialized object
	   or a view_conversion to a larger size).  Force the constant to
	   memory otherwise.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0
		&& bitpos + bitsize <= GET_MODE_BITSIZE (mode))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	/* Otherwise, if this object is not in memory and we either have an
	   offset, a BLKmode result, or a reference outside the object, put it
	   there.  Such cases can occur in Ada if we have unchecked conversion
	   of an expression from a scalar type to an array or record type or
	   for an ARRAY_RANGE_REF whose type is BLKmode.  */
	else if (!MEM_P (op0)
		 && (offset != 0
		     || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
	  {
	    tree nt = build_qualified_type (TREE_TYPE (tem),
					    (TYPE_QUALS (TREE_TYPE (tem))
					     | TYPE_QUAL_CONST));
	    rtx memloc = assign_temp (nt, 1, 1, 1);

	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    if (GET_MODE (op0) == BLKmode
		/* A constant address in OP0 can have VOIDmode, we must
		   not try to call force_reg in that case.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one element arrays having the same mode as its element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    gcc_assert (bitpos == 0
			&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
	    return op0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  It's tempting to make
	       this a constant type, since we know it's only being stored once,
	       but that can cause problems if we are taking the address of this
	       COMPONENT_REF because the MEM of any reference via that address
	       will have flags corresponding to the type, which will not
	       necessarily be constant.  */
	    if (mode == BLKmode)
	      {
		rtx new
		  = assign_stack_temp_for_type
		    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return lang_hooks.expand_expr (exp, original_target,
					   tmode, modifier, alt_rtl);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM);
	  else
	    {
	      gcc_assert (REG_P (target));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE
						    (TREE_OPERAND (exp, 0)))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			   type, 0);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
				       subreg_lowpart_offset (mode,
							      inner_mode));
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE
					      (TREE_OPERAND (exp, 0))));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);

    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (TYPE_MODE (type))
		  == GET_MODE_SIZE (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  op0 = gen_lowpart (TYPE_MODE (type), op0);
	}
      /* If both modes are integral, then we can convert from one to the
	 other.  */
      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
	       && SCALAR_INT_MODE_P (TYPE_MODE (type)))
	op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is
	 such that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;

    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE
						 (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE
						 (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;

    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (
		   gen_rtx_MULT (mode, op0,
				 gen_int_mode (tree_low_cst (exp1, 0),
					       TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
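
      /* For example, (int) a * (int) b with short A and B can use a
	 widening HImode-to-SImode multiply (smul_widen_optab or
	 umul_widen_optab) instead of extending both operands to SImode
	 and performing a full SImode multiplication.  */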
7892 subexp0
= TREE_OPERAND (exp
, 0);
7893 subexp1
= TREE_OPERAND (exp
, 1);
7894 /* First, check if we have a multiplication of one signed and one
7895 unsigned operand. */
7896 if (TREE_CODE (subexp0
) == NOP_EXPR
7897 && TREE_CODE (subexp1
) == NOP_EXPR
7898 && TREE_CODE (type
) == INTEGER_TYPE
7899 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0
, 0)))
7900 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7901 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0
, 0)))
7902 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1
, 0))))
7903 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0
, 0)))
7904 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1
, 0)))))
7906 enum machine_mode innermode
7907 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0
, 0)));
7908 this_optab
= usmul_widen_optab
;
7909 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7911 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7913 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0
, 0))))
7914 expand_operands (TREE_OPERAND (subexp0
, 0),
7915 TREE_OPERAND (subexp1
, 0),
7916 NULL_RTX
, &op0
, &op1
, 0);
7918 expand_operands (TREE_OPERAND (subexp0
, 0),
7919 TREE_OPERAND (subexp1
, 0),
7920 NULL_RTX
, &op1
, &op0
, 0);
      /* Check for a multiplication with matching signedness.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
               && TREE_CODE (type) == INTEGER_TYPE
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
               && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                    && int_fits_type_p (TREE_OPERAND (exp, 1),
                                        TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                    /* Don't use a widening multiply if a shift will do.  */
                    && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                         > HOST_BITS_PER_WIDE_INT)
                        || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
                   ||
                   (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
                    && (TYPE_PRECISION (TREE_TYPE
                                        (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                        == TYPE_PRECISION (TREE_TYPE
                                           (TREE_OPERAND
                                            (TREE_OPERAND (exp, 0), 0))))
                    /* If both operands are extended, they must either both
                       be zero-extended or both be sign-extended.  */
                    && (TYPE_UNSIGNED (TREE_TYPE
                                       (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                        == TYPE_UNSIGNED (TREE_TYPE
                                          (TREE_OPERAND
                                           (TREE_OPERAND (exp, 0), 0)))))))
        {
          tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
          enum machine_mode innermode = TYPE_MODE (op0type);
          bool zextend_p = TYPE_UNSIGNED (op0type);
          optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
          this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

          if (mode == GET_MODE_2XWIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1),
                                     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
                  else
                    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
                  goto binop3;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem, hipart;
                  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_normal (TREE_OPERAND (exp, 1)),
                                         unsignedp);
                  else
                    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  hipart = gen_highpart (innermode, temp);
                  htem = expand_mult_highpart_adjust (innermode, hipart,
                                                      op0, op1, hipart,
                                                      zextend_p);
                  if (htem != hipart)
                    emit_move_insn (hipart, htem);
                  return REDUCE_BIT_FIELD (temp);
                }
            }
        }
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
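      /* A sketch of the shape the widening checks above look for, assuming
         32-bit int, 16-bit short and a mulhisi-style widening pattern:

             int prod = (int) a_short * (int) b_short;

         matches the "matching signedness" branch and becomes one
         HImode x HImode -> SImode multiply instead of two extensions
         followed by a full SImode multiply.  */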
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
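      /* The first argument of expand_divmod selects the result: 0 computes
         the quotient (the division cases above), 1 the remainder (the
         modulus cases here).  */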
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      gcc_unreachable ();                       /* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode,
                          optab_for_tree_code (NEGATE_EXPR, type),
                          op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
          || modifier == EXPAND_STACK_PARM
          || (MEM_P (target) && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (REG_P (target)
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (! REG_P (target))
        target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }

      /* We generate better code and avoid problems with op1 mentioning
         target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
        op1 = force_reg (mode, op1);

      {
        enum rtx_code comparison_code;
        rtx cmpop1 = op1;

        if (code == MAX_EXPR)
          comparison_code = unsignedp ? GEU : GE;
        else
          comparison_code = unsignedp ? LEU : LE;

        /* Canonicalize to comparisons against 0.  */
        if (op1 == const1_rtx)
          {
            /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
               or (a != 0 ? a : 1) for unsigned.
               For MIN we are safe converting (a <= 1 ? a : 1)
               into (a <= 0 ? a : 1)  */
            cmpop1 = const0_rtx;
            if (code == MAX_EXPR)
              comparison_code = unsignedp ? NE : GT;
          }
        if (op1 == constm1_rtx && !unsignedp)
          {
            /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
               and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
            cmpop1 = const0_rtx;
            if (code == MIN_EXPR)
              comparison_code = LT;
          }
#ifdef HAVE_conditional_move
        /* Use a conditional move if possible.  */
        if (can_conditionally_move_p (mode))
          {
            rtx insn;

            /* ??? Same problem as in expmed.c: emit_conditional_move
               forces a stack adjustment via compare_from_rtx, and we
               lose the stack adjustment if the sequence we are about
               to create is discarded.  */
            do_pending_stack_adjust ();

            start_sequence ();

            /* Try to emit the conditional move.  */
            insn = emit_conditional_move (target, comparison_code,
                                          op0, cmpop1, mode,
                                          op0, op1, mode,
                                          unsignedp);

            /* If we could do the conditional move, emit the sequence,
               and return.  */
            if (insn != NULL_RTX)
              {
                rtx seq = get_insns ();
                end_sequence ();
                emit_insn (seq);
                return target;
              }

            /* Otherwise discard the sequence and fall back to code with
               branches.  */
            end_sequence ();
          }
#endif
        if (target != op0)
          emit_move_insn (target, op0);

        temp = gen_label_rtx ();

        /* If this mode is an integer too wide to compare properly,
           compare word by word.  Rely on cse to optimize constant cases.  */
        if (GET_MODE_CLASS (mode) == MODE_INT
            && ! can_compare_p (GE, mode, ccp_jump))
          {
            if (code == MAX_EXPR)
              do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
                                            NULL_RTX, temp);
            else
              do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
                                            NULL_RTX, temp);
          }
        else
          do_compare_rtx_and_jump (target, cmpop1, comparison_code,
                                   unsignedp, mode, NULL_RTX, NULL_RTX, temp);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
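      /* Without a min/max instruction or a conditional move, the fallback
         above computes, e.g., MAX roughly as:

             target = op0;
             if (target >= op1) goto temp;      (GE/GEU per signedness)
             target = op1;
           temp:

         i.e. one compare-and-branch around a single move.  */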
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */
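      /* Concretely: with no side effects, "a && b" computed this way is
         roughly (a != 0) & (b != 0), evaluating both operands, whereas
         TRUTH_ANDIF_EXPR would branch around the evaluation of b.  */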
    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                            tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && REG_P (original_target)
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          /* If temp is constant, we can just compute the result.  */
          if (GET_CODE (temp) == CONST_INT)
            {
              if (INTVAL (temp) != 0)
                emit_move_insn (target, const1_rtx);
              else
                emit_move_insn (target, const0_rtx);

              return target;
            }

          if (temp != original_target)
            {
              enum machine_mode mode1 = GET_MODE (temp);
              if (mode1 == VOIDmode)
                mode1 = tmode != VOIDmode ? tmode : mode;

              temp = copy_to_mode_reg (mode1, temp);
            }

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional store
         into a temporary variable.  Drop through and handle this
         like && and ||.  */

      if (! ignore
          && (target == 0
              || modifier == EXPAND_STACK_PARM
              || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && REG_P (target)
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator iter;

        gcc_assert (ignore);

        for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
          expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;

    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
         conditional jump and is handled in
         expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
         are required to be constructed to contain assignments of
         a temporary variable, so that we can evaluate them here
         for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
                  && !ignore
                  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
                  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
         if a target was specified use it; it will not be used as an
         intermediate target unless it is safe.  If no target, use a
         temporary.  */

      if (modifier != EXPAND_STACK_PARM
          && original_target
          && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
          && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
          && (! can_conditionally_move_p (mode)
              || REG_P (original_target))
#endif
          && !MEM_P (original_target))
        temp = original_target;
      else
        temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
                  modifier == EXPAND_STACK_PARM);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
                  modifier == EXPAND_STACK_PARM);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;

    case MODIFY_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);

        gcc_assert (ignore);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)));
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        expand_assignment (lhs, rhs);
        return const0_rtx;
      }
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
        expand_null_return ();
      else
        expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      op1 = expand_normal (TREE_OPERAND (exp, 1));

      if (!target)
        target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, true);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;
    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
         initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
         have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
                               modifier, alt_rtl);
    case REALIGN_LOAD_EXPR:
      {
        tree oprnd0 = TREE_OPERAND (exp, 0);
        tree oprnd1 = TREE_OPERAND (exp, 1);
        tree oprnd2 = TREE_OPERAND (exp, 2);
        rtx op2;

        this_optab = optab_for_tree_code (code, type);
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
                                  target, unsignedp);
        gcc_assert (temp);
        return temp;
      }

    case DOT_PROD_EXPR:
      {
        tree oprnd0 = TREE_OPERAND (exp, 0);
        tree oprnd1 = TREE_OPERAND (exp, 1);
        tree oprnd2 = TREE_OPERAND (exp, 2);
        rtx op2;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        target = expand_widen_pattern_expr (exp, op0, op1, op2,
                                            target, unsignedp);
        return target;
      }

    case WIDEN_SUM_EXPR:
      {
        tree oprnd0 = TREE_OPERAND (exp, 0);
        tree oprnd1 = TREE_OPERAND (exp, 1);

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
        target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
                                            target, unsignedp);
        return target;
      }
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
        op0 = expand_normal (TREE_OPERAND (exp, 0));
        this_optab = optab_for_tree_code (code, type);
        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
        gcc_assert (temp);
        return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
        target = expand_vec_shift_expr (exp, target);
        return target;
      }
    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
                                     modifier, alt_rtl);
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
        mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
                                   GET_MODE (exp));
      else
        mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
                                   ((unsigned HOST_WIDE_INT) 1
                                    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
                                   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
                                  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
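/* Example: reducing an SImode value to a 3-bit signed bit-field type on a
   32-bit host gives count == 29, so the value is shifted left 29 bits and
   then arithmetic-shifted right 29 bits, replicating bit 2 as the sign.
   In the unsigned case the same reduction is a single AND with 0x7.  */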
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
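/* The tree shape recognized above is the round-up-to-alignment idiom:
   an offset of the form

       (- (uintptr) &EXP) & (ALIGN - 1)

   so that &EXP + OFFSET is &EXP rounded up to a multiple of ALIGN, for
   some power-of-2 ALIGN exceeding BIGGEST_ALIGNMENT / BITS_PER_UNIT bytes.
   (The "uintptr" cast is only illustrative.)  */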
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
        {
          *ptr_offset = size_zero_node;
          return TREE_OPERAND (arg, 0);
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
        {
          array = TREE_OPERAND (arg, 0);
          offset = size_zero_node;
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
            return 0;
        }
      else
        return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg0, 0);
          offset = arg1;
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg1, 0);
          offset = arg0;
        }
      else
        return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
          || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
        return 0;

      /* We handle them only if they are read-only, non-volatile and
         bind locally.  */
      if (! TREE_READONLY (array)
          || TREE_SIDE_EFFECTS (array)
          || ! targetm.binds_local_p (array))
        return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
          || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
        return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
         and inside of the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
          && (! host_integerp (offset, 1)
              || compare_tree_int (offset, length) >= 0))
        return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
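/* For example, given the argument of strlen ("abcdef" + 2), i.e. a
   PLUS_EXPR of an ADDR_EXPR of the STRING_CST "abcdef" and the constant 2,
   this returns the STRING_CST and sets *PTR_OFFSET to a sizetype 2.  */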
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
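/* The simulated sequence mentioned above looks roughly like this
   (illustrative only, uninverted case):

       target = 1;                         assume the flag is set
       if (op0 <cond> op1) goto label;     it was: keep the 1
       target = 0;                         it was not: store 0
     label:

   which is what the tail of this function emits when emit_store_flag
   fails.  */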
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
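  /* For instance, "(x & 8) != 0" becomes "(x >> 3) & 1" in
     fold_single_bit_test, and "(x & 8) == 0" additionally XORs that
     result with 1; no scc instruction is needed.  */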
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (lang_hooks.types.type_for_size
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
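/* For example, for "switch (x) { case 3: ... case 10: ... }" the caller
   passes minval == 3 and range == 7; the casesi pattern itself subtracts
   the minimum and performs the bounds check against the dispatch table.  */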
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
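  /* For example, with a case range of 3..10 the caller has already
     computed index' = index - 3, and RANGE is 7; the single unsigned test
     "index' > 7" then rejects both index < 3 (which wrapped around to a
     huge unsigned value) and index > 10.  */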
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
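/* E.g. on a target without V2DImode instructions this still returns
   nonzero for V2DImode when DImode is supported; the vector operations
   are then emulated piecewise on the two DImode halves.  */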
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"