/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
   Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
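
/* As an illustration (target assumptions, not from this file): on a
   machine where the stack grows downward but argument offsets grow
   upward, the two macros above differ, so PUSH_ARGS_REVERSED gets
   defined and arguments are pushed last to first; STACK_PUSH_CODE then
   defaults to PRE_DEC, i.e. a push decrements the stack pointer before
   storing.  */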
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
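
/* Worked example with assumed values (both are target macros): if
   MOVE_MAX_PIECES is 8 and MOVE_RATIO is 15, then for a 4-byte-aligned
   copy of 12 bytes move_by_pieces_ninsns counts one 8-byte move plus
   one 4-byte move = 2 insns; 2 < 15, so MOVE_BY_PIECES_P (12, 32) is
   true and the copy is expanded inline instead of through memcpy.  */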
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
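
/* Typical use (a sketch with assumed operands): widening a QImode
   value SRC into a fresh SImode register,

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);   (1 = treat SRC as unsigned)

   which emits a single zero_extend insn if the target has one and
   otherwise falls back to the word-at-a-time or shift-based paths
   above.  */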
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
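
/* For example (assumed host/target values): with MOVE_MAX_PIECES == 16
   and a 64-bit HOST_WIDE_INT, 2 * sizeof (HOST_WIDE_INT) is also 16,
   so STORE_MAX_PIECES is 16; with a 32-bit HOST_WIDE_INT it would be
   capped at 8, since an immediate wider than two host words cannot be
   represented as a constant.  */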
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
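
/* Worked example (assuming the DImode..QImode integer moves all exist
   and the alignment permits them): for l == 13 and MOVE_MAX_PIECES == 8
   the loop counts 13 / 8 = 1 DImode move (5 bytes left), 5 / 4 = 1
   SImode move (1 left), 1 / 2 = 0 HImode moves, and 1 / 1 = 1 QImode
   move, returning 3.  */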
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
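
/* For instance (an assumed target with post-increment addressing):
   copying 8 bytes in SImode with explicit_inc_to == explicit_inc_from
   == 1 emits a move from (mem:SI (reg FROM_ADDR)) to
   (mem:SI (reg TO_ADDR)) followed by two add2 insns bumping each
   address register by 4, and then repeats once for the second word.  */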
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
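
/* A typical call (operands assumed for illustration): copying a
   64-byte BLKmode temporary as part of a structure assignment,

     emit_block_move (dst, src, GEN_INT (64), BLOCK_OP_NORMAL);

   which tries move_by_pieces, then a movmem pattern, and finally the
   memcpy libcall, in that order.  */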
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
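
/* The generated control flow is, schematically:

       iter = 0;  goto cmp;
   top: x[iter] = y[iter];  iter++;
   cmp: if (iter < size) goto top;

   i.e. a test-at-the-bottom byte-copy loop, which also handles
   size == 0 correctly.  */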
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
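
/* A group rtx describing a value split between r3 (bytes 0-3) and r4
   (bytes 4-7) looks like this (registers assumed for illustration):

     (parallel [(expr_list (reg:SI 3) (const_int 0))
		(expr_list (reg:SI 4) (const_int 4))])

   gen_group_rtx returns the same shape with fresh pseudos in place
   of r3 and r4.  */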
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
		   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
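
/* Sketch of the CONSTFUN protocol (hypothetical callback, added for
   illustration; it is not part of this file): a callback that fills
   memory with a repeated byte might look like

     static rtx
     fill_by_pieces_1 (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		       enum machine_mode mode)
     ...returning a CONST_INT of MODE with the byte at *data replicated
     through the whole mode...

   store_by_pieces_1 then asks the callback for one constant per
   MODE-sized chunk, exactly as clear_by_pieces_1 above returns
   const0_rtx for every chunk.  */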
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (GET_CODE (size) == CONST_INT
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align))
    ;
  else
    return clear_storage_via_libcall (object, size,
				      method == BLOCK_OP_TAILCALL);

  return NULL;
}
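
/* Usage sketch (added illustration; OBJECT stands in for a caller's
   BLKmode MEM and the 64-byte size is an assumption):

     clear_storage (object, GEN_INT (64), BLOCK_OP_NORMAL);

   zeros 64 bytes, trying clear_by_pieces, then a setmem pattern, and
   finally the memset libcall, in that order.  */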
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those pseudos into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  size_tree = make_tree (sizetype, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
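
/* Sketch (hypothetical; "__target_memset" is a made-up name): a target
   whose block-clear routine carries a nonstandard assembler name could
   redirect the libcall with

     init_block_clear_fn ("__target_memset");

   A NULL asmspec, as used above, builds the decl but leaves the
   assembler name alone.  */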
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = setmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than
	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
	     the mode mask, as it is returned by the macro, it will
	     definitely be less than the actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (object, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx opsize, opchar;
	  enum machine_mode char_mode;
	  rtx last = get_last_insn ();
	  rtx pat;

	  opsize = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[1].predicate;
	  if (pred != 0 && ! (*pred) (opsize, mode))
	    opsize = copy_to_mode_reg (mode, opsize);

	  opchar = val;
	  char_mode = insn_data[(int) code].operand[2].mode;
	  if (char_mode != VOIDmode)
	    {
	      opchar = convert_to_mode (char_mode, opchar, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (opchar, char_mode))
		opchar = copy_to_mode_reg (char_mode, opchar);
	    }

	  pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  return false;
}
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
	{
	  emit_move_insn (part, val);
	  return;
	}
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
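
/* Usage sketch (added illustration; X stands in for a caller's SCmode
   rtx):

     write_complex_part (x, CONST0_RTX (SFmode), true);

   stores zero into the imaginary half of X through whichever of the
   CONCAT, MEM, subreg, or bit-field paths above applies.  */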
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

static rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
	return ret;
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, NULL_RTX, imode, imode);
}
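
/* Usage sketch (added illustration): the read/write pair composes
   naturally; for example

     write_complex_part (x, read_complex_part (y, true), false);

   copies Y's imaginary part into X's real part.  The by-parts fallback
   in emit_move_complex below uses the same pairing, without the swap.  */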
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
		       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = mov_optab->handlers[imode].insn_code;
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

static rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
	val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
			      GEN_INT (adjust), stack_pointer_rtx,
			      0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
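
/* Example (added illustration): on a STACK_GROWS_DOWNWARD target, a
   4-byte push expressed as (mem:SI (pre_dec (reg sp))) is rewritten by
   the routine above into an explicit "sp = sp - 4" followed by a plain
   (mem:SI (reg sp)), which no longer auto-increments.  */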
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

static rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
		  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			 read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	   && (!STRICT_ALIGNMENT
	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
	 existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
	{
	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
			   BLOCK_OP_NO_LIBCALL);
	  return get_last_insn ();
	}

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
	return ret;
    }

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);
  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = mov_optab->handlers[CCmode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
	 constant.  Otherwise, force it into a register.  Then we must
	 be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
	{
	  y = use_anchored_address (force_const_mem (mode, y));
	  ypart = operand_subword (y, i, 1, mode);
	}
      else if (ypart == 0)
	ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

static rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = mov_optab->handlers[mode].insn_code;
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
	return result;
      else
	return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, false);
      if (ret)
	return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	  else
	    y = use_anchored_address (y);
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (MEM_P (y)
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
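
/* Usage sketch (added illustration; TARGET stands in for a caller's
   SImode register):

     emit_move_insn (target, GEN_INT (42));

   Y may be a VOIDmode constant, as here; non-legitimate constants are
   spilled to memory, and a REG_EQUAL note recording the constant is
   attached when the destination is a register.  */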
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (LEGITIMATE_CONSTANT_P (y))
    oldcost = rtx_cost (y, SET);
  else
    oldcost = rtx_cost (force_const_mem (dstmode, y), SET);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
	    continue;
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
	  if (oldcost < newcost)
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	{
	  trunc_y = force_const_mem (srcmode, trunc_y);
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
	  if (oldcost < newcost)
	    continue;
	  trunc_y = validize_mem (trunc_y);
	}
      else
	continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
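
/* Usage sketch (added illustration; the 32-byte size is an assumption):

     rtx addr = push_block (GEN_INT (32), 0, 0);

   allocates 32 bytes of stack and returns an address for the start of
   the block, possibly virtual_outgoing_args_rtx itself.  */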
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (used)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 enum machine_mode mode1, rtx str_rtx,
				 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (!BINARY_CLASS_P (src)
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);
  STRIP_NOPS (op0);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
				MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
	return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (TREE_CODE (src))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if (bitpos + bitsize != str_bitsize
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, 0);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
			    build_int_cst (NULL_TREE, bitpos),
			    NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
      value = convert_modes (GET_MODE (str_rtx),
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
	{
	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
			      - 1);
	  value = expand_and (GET_MODE (str_rtx), value, mask,
			      NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
			    build_int_cst (NULL_TREE, bitpos),
			    NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
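
/* Example of what the routine above catches (added illustration,
   source-level view; the struct layout is an assumption): for a field
   occupying the topmost bits of its word, "s.b += 1" needs no masking,
   so the whole statement becomes a single add on the containing word;
   likewise "s.b |= 2" becomes one IOR with a shifted, masked constant.  */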
/* Expand an assignment that stores the value of FROM into TO.  */

void
expand_assignment (tree to, tree from)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_normal (from);
      return;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
  if (handled_component_p (to)
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep, true);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      to_rtx = expand_normal (tem);

      if (offset != 0)
        {
          rtx offset_rtx;

          if (!MEM_P (to_rtx))
            {
              /* We can get constant negative offsets into arrays with broken
                 user code.  Translate this to a trap instead of ICEing.  */
              gcc_assert (TREE_CODE (offset) == INTEGER_CST);
              expand_builtin_trap ();
              to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
            }

          offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (MEM_P (to_rtx)
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_target (to,
                                                                   offset));
        }

      /* Handle expand_expr of a complex value returning a CONCAT.  */
      if (GET_CODE (to_rtx) == CONCAT)
        {
          if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
            {
              gcc_assert (bitpos == 0);
              result = store_expr (from, to_rtx, false);
            }
          else
            {
              gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
              result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
            }
        }
      else
        {
          if (MEM_P (to_rtx))
            {
              /* If the field is at offset zero, we could have been given the
                 DECL_RTX of the parent struct.  Don't munge it.  */
              to_rtx = shallow_copy_rtx (to_rtx);

              set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);

              /* Deal with volatile and readonly fields.  The former is only
                 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
              if (volatilep)
                MEM_VOLATILE_P (to_rtx) = 1;
              if (component_uses_parent_alias_set (to))
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
            }

          if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
                                               to_rtx, to, from))
            result = NULL;
          else
            result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                                  TREE_TYPE (tem), get_alias_set (to));
        }

      if (result)
        preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && REG_P (DECL_RTL (to))))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
          if (POINTER_TYPE_P (TREE_TYPE (to)))
            value = convert_memory_address (GET_MODE (to_rtx), value);
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TYPE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */

rtx
store_expr (tree exp, rtx target, int call_param_p)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  int dont_return_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other.  Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
         in some optimizations.  Do the conversion in two steps: first
         change the signedness, if needed, then the extend.  But don't
         do this if the type of EXP is a subtype of something else
         since then the conversion might involve more than just
         converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0
          && (!lang_hooks.reduce_bit_field_operations
              || (GET_MODE_PRECISION (GET_MODE (target))
                  == TYPE_PRECISION (TREE_TYPE (exp)))))
        {
          if (TYPE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp = convert
              (lang_hooks.types.signed_or_unsigned_type
               (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

          exp = convert (lang_hooks.types.type_for_mode
                         (GET_MODE (SUBREG_REG (target)),
                          SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
                               &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && REG_P (target)
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(MEM_P (target) && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && CONSTANT_P (temp))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
         but TARGET is not valid memory reference, TEMP will differ
         from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call
         expr_size unless necessary, because some front-ends (C++)
         expr_size-hook must not be given objects that are not
         supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (call_param_p
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
              rtx label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
                                               TYPE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (call_param_p
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */
              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
                  if (GET_MODE (copy_size_rtx) != Pmode)
                    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
                                                     TYPE_UNSIGNED (sizetype));
#endif

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size, BLOCK_OP_NORMAL);

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, TREE_TYPE (exp),
                         int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (call_param_p
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
        {
          temp = force_operand (temp, target);
          if (temp != target)
            emit_move_insn (target, temp);
        }
    }

  return NULL_RTX;
}
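/* Illustrative example of the STRING_CST path above, added for exposition
   and not part of the original source:

     char buf[8] = "hi";

   expr_size is 8 while TREE_STRING_LENGTH is 3 ("hi" plus the trailing
   NUL), so emit_block_move copies 3 bytes and clear_storage zeros the
   remaining 5.  */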
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields are set to non-constant values,
     and place it in *P_NC_ELTS; and
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * if a type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *p_must_clear.  */

static void
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
                            HOST_WIDE_INT *p_nc_elts,
                            HOST_WIDE_INT *p_elt_count,
                            bool *p_must_clear)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, nc_elts, elt_count;
  tree value, purpose;

  nz_elts = 0;
  nc_elts = 0;
  elt_count = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult;

      mult = 1;
      if (TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lo_index = TREE_OPERAND (purpose, 0);
          tree hi_index = TREE_OPERAND (purpose, 1);

          if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
            mult = (tree_low_cst (hi_index, 1)
                    - tree_low_cst (lo_index, 1) + 1);
        }

      switch (TREE_CODE (value))
        {
        case CONSTRUCTOR:
          {
            HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
            categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
            nz_elts += mult * nz;
            nc_elts += mult * nc;
            elt_count += mult * ic;
          }
          break;

        case INTEGER_CST:
        case REAL_CST:
          if (!initializer_zerop (value))
            nz_elts += mult;
          elt_count += mult;
          break;

        case STRING_CST:
          nz_elts += mult * TREE_STRING_LENGTH (value);
          elt_count += mult * TREE_STRING_LENGTH (value);
          break;

        case COMPLEX_CST:
          if (!initializer_zerop (TREE_REALPART (value)))
            nz_elts += mult;
          if (!initializer_zerop (TREE_IMAGPART (value)))
            nz_elts += mult;
          elt_count += mult;
          break;

        case VECTOR_CST:
          {
            tree v;
            for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
              {
                if (!initializer_zerop (TREE_VALUE (v)))
                  nz_elts += mult;
                elt_count += mult;
              }
          }
          break;

        default:
          nz_elts += mult;
          elt_count += mult;
          if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
            nc_elts += mult;
          break;
        }
    }

  if (!*p_must_clear
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
    {
      tree init_sub_type;
      bool clear_this = true;

      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
        {
          /* We don't expect more than one element of the union to be
             initialized.  Not sure what we should do otherwise... */
          gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
                      == 1);

          init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
                                                CONSTRUCTOR_ELTS (ctor),
                                                0)->value);

          /* ??? We could look at each element of the union, and find the
             largest element.  Which would avoid comparing the size of the
             initialized element against any tail padding in the union.
             Doesn't seem worth the effort...  */
          if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
                                TYPE_SIZE (init_sub_type)) == 1)
            {
              /* And now we have to find out if the element itself is fully
                 constructed.  E.g. for union { struct { int a, b; } s; } u
                 = { .s = { .a = 1 } }.  */
              if (elt_count == count_type_elements (init_sub_type, false))
                clear_this = false;
            }
        }

      *p_must_clear = clear_this;
    }

  *p_nz_elts += nz_elts;
  *p_nc_elts += nc_elts;
  *p_elt_count += elt_count;
}

void
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
                          HOST_WIDE_INT *p_nc_elts,
                          HOST_WIDE_INT *p_elt_count,
                          bool *p_must_clear)
{
  *p_nz_elts = 0;
  *p_nc_elts = 0;
  *p_elt_count = 0;
  *p_must_clear = false;
  categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
                              p_must_clear);
}
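/* Illustrative example, added for exposition and not part of the original
   source: for

     extern int f (void);
     struct S { int a, b, c; };
     struct S s = { 1, 0, f () };

   the CONSTRUCTOR yields *p_nz_elts == 2 (the 1 and the call, neither of
   which is initializer_zerop), *p_nc_elts == 1 (the call is not a valid
   constant initializer) and *p_elt_count == 3.  */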
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
   array member at the end of the structure.  */

HOST_WIDE_INT
count_type_elements (tree type, bool allow_flexarr)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
        tree telts = array_type_nelts (type);
        if (telts && host_integerp (telts, 1))
          {
            HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
            HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);

            if (n == 0)
              return 0;
            else if (max / n > m)
              return n * m;
          }
        return -1;
      }

    case RECORD_TYPE:
      {
        HOST_WIDE_INT n = 0, t;
        tree f;

        for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              t = count_type_elements (TREE_TYPE (f), false);
              if (t < 0)
                {
                  /* Check for structures with flexible array member.  */
                  tree tf = TREE_TYPE (f);
                  if (allow_flexarr
                      && TREE_CHAIN (f) == NULL
                      && TREE_CODE (tf) == ARRAY_TYPE
                      && TYPE_DOMAIN (tf)
                      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
                      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
                      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
                      && int_size_in_bytes (type) >= 0)
                    break;

                  return -1;
                }
              n += t;
            }

        return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        /* Ho hum.  How in the world do we guess here?  Clearly it isn't
           right to count the fields.  Guess based on the number of words.  */
        HOST_WIDE_INT n = int_size_in_bytes (type);
        if (n < 0)
          return -1;
        return n / UNITS_PER_WORD;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
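/* Illustrative example, added for exposition and not part of the original
   source:

     struct P { int x, y; };
     struct T { struct P p[4]; int n; };

   count_type_elements for struct T returns 4 * 2 + 1 == 9 scalars.  With
   ALLOW_FLEXARR true, a trailing flexible array member merely stops the
   field scan instead of forcing a return of -1.  */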
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
      if (must_clear)
        return 1;

      elts = count_type_elements (TREE_TYPE (exp), false);

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}

/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, count;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
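/* Illustrative example, added for exposition and not part of the original
   source: for

     int v[8] = { 0, 0, 1, 0, 0, 0, 0, 0 };

   nz_elts is 1 and count_type_elements gives 8, so mostly_zeros_p returns
   1 (1 < 8 / 4) while all_zeros_p returns 0.  */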
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
                         tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
         bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        unsigned HOST_WIDE_INT idx;
        tree field, value;

        /* If size is zero or the target is already cleared, do nothing.  */
        if (size == 0 || cleared)
          cleared = 1;
        /* We either clear the aggregate or indicate the value is dead.  */
        else if ((TREE_CODE (type) == UNION_TYPE
                  || TREE_CODE (type) == QUAL_UNION_TYPE)
                 && ! CONSTRUCTOR_ELTS (exp))
          /* If the constructor is empty, clear the union.  */
          {
            clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        /* If we are building a static constructor into a register,
           set the initial value as zero so we can fold the value into
           a constant.  But if more than one register is involved,
           this probably loses.  */
        else if (REG_P (target) && TREE_STATIC (exp)
                 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
          {
            emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            cleared = 1;
          }

        /* If the constructor has fewer fields than the structure or
           if we are initializing the structure to mostly zeros, clear
           the whole structure first.  Don't do this if TARGET is a
           register whose mode size isn't equal to SIZE since
           clear_storage can't handle this case.  */
        else if (size > 0
                 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
                      != fields_length (type))
                     || mostly_zeros_p (exp))
                 && (!REG_P (target)
                     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
                         == size)))
          {
            clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        if (! cleared)
          emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

        /* Store each element of the constructor into the
           corresponding field of TARGET.  */
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
          {
            enum machine_mode mode;
            HOST_WIDE_INT bitsize;
            HOST_WIDE_INT bitpos = 0;
            tree offset;
            rtx to_rtx = target;

            /* Just ignore missing fields.  We cleared the whole
               structure, above, if any fields are missing.  */
            if (field == 0)
              continue;

            if (cleared && initializer_zerop (value))
              continue;

            if (host_integerp (DECL_SIZE (field), 1))
              bitsize = tree_low_cst (DECL_SIZE (field), 1);
            else
              bitsize = -1;

            mode = DECL_MODE (field);
            if (DECL_BIT_FIELD (field))
              mode = VOIDmode;

            offset = DECL_FIELD_OFFSET (field);
            if (host_integerp (offset, 0)
                && host_integerp (bit_position (field), 0))
              {
                bitpos = int_bit_position (field);
                offset = 0;
              }
            else
              bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

            if (offset)
              {
                rtx offset_rtx;

                offset
                  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
                                                    make_tree (TREE_TYPE (exp),
                                                               target));
                offset_rtx = expand_normal (offset);
                gcc_assert (MEM_P (to_rtx));

#ifdef POINTERS_EXTEND_UNSIGNED
                if (GET_MODE (offset_rtx) != Pmode)
                  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
                if (GET_MODE (offset_rtx) != ptr_mode)
                  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

                to_rtx = offset_address (to_rtx, offset_rtx,
                                         highest_pow2_factor (offset));
              }

#ifdef WORD_REGISTER_OPERATIONS
            /* If this initializes a field that is smaller than a
               word, at the start of a word, try to widen it to a full
               word.  This special case allows us to output C++ member
               function initializations in a form that the optimizers
               can understand.  */
            if (REG_P (target)
                && bitsize < BITS_PER_WORD
                && bitpos % BITS_PER_WORD == 0
                && GET_MODE_CLASS (mode) == MODE_INT
                && TREE_CODE (value) == INTEGER_CST
                && exp_size >= 0
                && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
              {
                tree type = TREE_TYPE (value);

                if (TYPE_PRECISION (type) < BITS_PER_WORD)
                  {
                    type = lang_hooks.types.type_for_size
                      (BITS_PER_WORD, TYPE_UNSIGNED (type));
                    value = convert (type, value);
                  }

                if (BYTES_BIG_ENDIAN)
                  value
                    = fold_build2 (LSHIFT_EXPR, type, value,
                                   build_int_cst (NULL_TREE,
                                                  BITS_PER_WORD - bitsize));
                bitsize = BITS_PER_WORD;
                mode = word_mode;
              }
#endif

            if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
                && DECL_NONADDRESSABLE_P (field))
              {
                to_rtx = copy_rtx (to_rtx);
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
              }

            store_constructor_field (to_rtx, bitsize, bitpos, mode,
                                     value, type, cleared,
                                     get_alias_set (TREE_TYPE (field)));
          }
        break;
      }
    case ARRAY_TYPE:
      {
        tree value, index;
        unsigned HOST_WIDE_INT i;
        int need_to_clear;
        tree domain;
        tree elttype = TREE_TYPE (type);
        int const_bounds_p;
        HOST_WIDE_INT minelt = 0;
        HOST_WIDE_INT maxelt = 0;

        domain = TYPE_DOMAIN (type);
        const_bounds_p = (TYPE_MIN_VALUE (domain)
                          && TYPE_MAX_VALUE (domain)
                          && host_integerp (TYPE_MIN_VALUE (domain), 0)
                          && host_integerp (TYPE_MAX_VALUE (domain), 0));

        /* If we have constant bounds for the range of the type, get them.  */
        if (const_bounds_p)
          {
            minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
            maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
          }

        /* If the constructor has fewer elements than the array, clear
           the whole array first.  Similarly if this is static
           constructor of a non-BLKmode object.  */
        if (cleared)
          need_to_clear = 0;
        else if (REG_P (target) && TREE_STATIC (exp))
          need_to_clear = 1;
        else
          {
            unsigned HOST_WIDE_INT idx;
            tree index, value;
            HOST_WIDE_INT count = 0, zero_count = 0;
            need_to_clear = ! const_bounds_p;

            /* This loop is a more accurate version of the loop in
               mostly_zeros_p (it handles RANGE_EXPR in an index).  It
               is also needed to check for missing elements.  */
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
              {
                HOST_WIDE_INT this_node_count;

                if (need_to_clear)
                  break;

                if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                  {
                    tree lo_index = TREE_OPERAND (index, 0);
                    tree hi_index = TREE_OPERAND (index, 1);

                    if (! host_integerp (lo_index, 1)
                        || ! host_integerp (hi_index, 1))
                      {
                        need_to_clear = 1;
                        break;
                      }

                    this_node_count = (tree_low_cst (hi_index, 1)
                                       - tree_low_cst (lo_index, 1) + 1);
                  }
                else
                  this_node_count = 1;

                count += this_node_count;
                if (mostly_zeros_p (value))
                  zero_count += this_node_count;
              }

            /* Clear the entire array first if there are any missing
               elements, or if the incidence of zero elements is >=
               75%.  */
            if (! need_to_clear
                && (count < maxelt - minelt + 1
                    || 4 * zero_count >= 3 * count))
              need_to_clear = 1;
          }

        if (need_to_clear && size > 0)
          {
            if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            else
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        if (!cleared && REG_P (target))
          /* Inform later passes that the old value is dead.  */
          emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

        /* Store each element of the constructor into the
           corresponding element of TARGET, determined by counting the
           elements.  */
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
          {
            enum machine_mode mode;
            HOST_WIDE_INT bitsize;
            HOST_WIDE_INT bitpos;
            int unsignedp;
            rtx xtarget = target;

            if (cleared && initializer_zerop (value))
              continue;

            unsignedp = TYPE_UNSIGNED (elttype);
            mode = TYPE_MODE (elttype);
            if (mode == BLKmode)
              bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
                         ? tree_low_cst (TYPE_SIZE (elttype), 1)
                         : -1);
            else
              bitsize = GET_MODE_BITSIZE (mode);

            if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
              {
                tree lo_index = TREE_OPERAND (index, 0);
                tree hi_index = TREE_OPERAND (index, 1);
                rtx index_r, pos_rtx;
                HOST_WIDE_INT lo, hi, count;
                tree position;

                /* If the range is constant and "small", unroll the loop.  */
                if (const_bounds_p
                    && host_integerp (lo_index, 0)
                    && host_integerp (hi_index, 0)
                    && (lo = tree_low_cst (lo_index, 0),
                        hi = tree_low_cst (hi_index, 0),
                        count = hi - lo + 1,
                        (!MEM_P (target)
                         || count <= 2
                         || (host_integerp (TYPE_SIZE (elttype), 1)
                             && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
                                 <= 40 * 8)))))
                  {
                    lo -= minelt;  hi -= minelt;
                    for (; lo <= hi; lo++)
                      {
                        bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

                        if (MEM_P (target)
                            && !MEM_KEEP_ALIAS_SET_P (target)
                            && TREE_CODE (type) == ARRAY_TYPE
                            && TYPE_NONALIASED_COMPONENT (type))
                          {
                            target = copy_rtx (target);
                            MEM_KEEP_ALIAS_SET_P (target) = 1;
                          }

                        store_constructor_field
                          (target, bitsize, bitpos, mode, value, type, cleared,
                           get_alias_set (elttype));
                      }
                  }
                else
                  {
                    rtx loop_start = gen_label_rtx ();
                    rtx loop_end = gen_label_rtx ();
                    tree exit_cond;

                    expand_normal (hi_index);
                    unsignedp = TYPE_UNSIGNED (domain);

                    index = build_decl (VAR_DECL, NULL_TREE, domain);

                    index_r
                      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                   &unsignedp, 0));
                    SET_DECL_RTL (index, index_r);
                    store_expr (lo_index, index_r, 0);

                    /* Build the head of the loop.  */
                    do_pending_stack_adjust ();
                    emit_label (loop_start);

                    /* Assign value to element index.  */
                    position
                      = convert (ssizetype,
                                 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                              index, TYPE_MIN_VALUE (domain)));
                    position = size_binop (MULT_EXPR, position,
                                           convert (ssizetype,
                                                    TYPE_SIZE_UNIT (elttype)));

                    pos_rtx = expand_normal (position);
                    xtarget = offset_address (target, pos_rtx,
                                              highest_pow2_factor (position));
                    xtarget = adjust_address (xtarget, mode, 0);
                    if (TREE_CODE (value) == CONSTRUCTOR)
                      store_constructor (value, xtarget, cleared,
                                         bitsize / BITS_PER_UNIT);
                    else
                      store_expr (value, xtarget, 0);

                    /* Generate a conditional jump to exit the loop.  */
                    exit_cond = build2 (LT_EXPR, integer_type_node,
                                        index, hi_index);
                    jumpif (exit_cond, loop_end);

                    /* Update the loop counter, and jump to the head of
                       the loop.  */
                    expand_assignment (index,
                                       build2 (PLUS_EXPR, TREE_TYPE (index),
                                               index, integer_one_node));

                    emit_jump (loop_start);

                    /* Build the end of the loop.  */
                    emit_label (loop_end);
                  }
              }
            else if ((index != 0 && ! host_integerp (index, 0))
                     || ! host_integerp (TYPE_SIZE (elttype), 1))
              {
                tree position;

                if (index == 0)
                  index = ssize_int (1);

                if (minelt)
                  index = fold_convert (ssizetype,
                                        fold_build2 (MINUS_EXPR,
                                                     TREE_TYPE (index),
                                                     index,
                                                     TYPE_MIN_VALUE (domain)));

                position = size_binop (MULT_EXPR, index,
                                       convert (ssizetype,
                                                TYPE_SIZE_UNIT (elttype)));
                xtarget = offset_address (target,
                                          expand_normal (position),
                                          highest_pow2_factor (position));
                xtarget = adjust_address (xtarget, mode, 0);
                store_expr (value, xtarget, 0);
              }
            else
              {
                if (index != 0)
                  bitpos = ((tree_low_cst (index, 0) - minelt)
                            * tree_low_cst (TYPE_SIZE (elttype), 1));
                else
                  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

                if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
                    && TREE_CODE (type) == ARRAY_TYPE
                    && TYPE_NONALIASED_COMPONENT (type))
                  {
                    target = copy_rtx (target);
                    MEM_KEEP_ALIAS_SET_P (target) = 1;
                  }
                store_constructor_field (target, bitsize, bitpos, mode, value,
                                         type, cleared, get_alias_set (elttype));
              }
          }
        break;
      }
    case VECTOR_TYPE:
      {
        unsigned HOST_WIDE_INT idx;
        constructor_elt *ce;
        int i;
        int need_to_clear;
        int icode = 0;
        tree elttype = TREE_TYPE (type);
        int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
        enum machine_mode eltmode = TYPE_MODE (elttype);
        HOST_WIDE_INT bitsize;
        HOST_WIDE_INT bitpos;
        rtvec vector = NULL;
        unsigned n_elts;

        gcc_assert (eltmode != BLKmode);

        n_elts = TYPE_VECTOR_SUBPARTS (type);
        if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
          {
            enum machine_mode mode = GET_MODE (target);

            icode = (int) vec_init_optab->handlers[mode].insn_code;
            if (icode != CODE_FOR_nothing)
              {
                unsigned int i;

                vector = rtvec_alloc (n_elts);
                for (i = 0; i < n_elts; i++)
                  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
              }
          }

        /* If the constructor has fewer elements than the vector,
           clear the whole array first.  Similarly if this is static
           constructor of a non-BLKmode object.  */
        if (cleared)
          need_to_clear = 0;
        else if (REG_P (target) && TREE_STATIC (exp))
          need_to_clear = 1;
        else
          {
            unsigned HOST_WIDE_INT count = 0, zero_count = 0;
            tree value;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
              {
                int n_elts_here = tree_low_cst
                  (int_const_binop (TRUNC_DIV_EXPR,
                                    TYPE_SIZE (TREE_TYPE (value)),
                                    TYPE_SIZE (elttype), 0), 1);

                count += n_elts_here;
                if (mostly_zeros_p (value))
                  zero_count += n_elts_here;
              }

            /* Clear the entire vector first if there are any missing elements,
               or if the incidence of zero elements is >= 75%.  */
            need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
          }

        if (need_to_clear && size > 0 && !vector)
          {
            if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            else
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        /* Inform later passes that the old value is dead.  */
        if (!cleared && REG_P (target))
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */
        for (idx = 0, i = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
             idx++, i += bitsize / elt_size)
          {
            HOST_WIDE_INT eltpos;
            tree value = ce->value;

            bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
            if (cleared && initializer_zerop (value))
              continue;

            if (ce->index)
              eltpos = tree_low_cst (ce->index, 1);
            else
              eltpos = i;

            if (vector)
              {
                /* Vector CONSTRUCTORs should only be built from smaller
                   vectors in the case of BLKmode vectors.  */
                gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
                RTVEC_ELT (vector, eltpos)
                  = expand_normal (value);
              }
            else
              {
                enum machine_mode value_mode =
                  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
                  ? TYPE_MODE (TREE_TYPE (value))
                  : eltmode;
                bitpos = eltpos * elt_size;
                store_constructor_field (target, bitsize, bitpos,
                                         value_mode, value, type,
                                         cleared, get_alias_set (elttype));
              }
          }

        if (vector)
          emit_insn (GEN_FCN (icode)
                     (target,
                      gen_rtx_PARALLEL (GET_MODE (target), vector)));
        break;
      }

    default:
      gcc_unreachable ();
    }
}
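/* Illustrative example, added for exposition and not part of the original
   source: the GNU C range initializer

     int a[100] = { [0 ... 49] = 7 };

   reaches the RANGE_EXPR handling in the ARRAY_TYPE case above.  Only 50
   of the 100 elements are present, so the array is cleared first; and
   since 50 four-byte elements exceed the "small" unrolling limit, the
   stores are emitted as the runtime loop built around loop_start and
   loop_end rather than unrolled.  */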
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
             enum machine_mode mode, tree exp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (mode != BLKmode
          && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
                || bitpos % GET_MODE_ALIGNMENT (mode))
               && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
              || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
         RHS isn't the same size as the bitfield, we must use bitfield
         operations.  */
      || (bitsize >= 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
         implies a mask operation.  If the precision is the same size as
         the field we're storing into, that mask is redundant.  This is
         particularly common with bit field assignments generated by the
         C front end.  */
      if (TREE_CODE (exp) == NOP_EXPR)
        {
          tree type = TREE_TYPE (exp);
          if (INTEGRAL_TYPE_P (type)
              && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
              && bitsize == TYPE_PRECISION (type))
            {
              type = TREE_TYPE (TREE_OPERAND (exp, 0));
              if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
                exp = TREE_OPERAND (exp, 0);
            }
        }

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
          && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
                                       - bitsize),
                             NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
         must be in memory and BITPOS must be aligned on a byte
         boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
        {
          gcc_assert (MEM_P (target) && MEM_P (temp)
                      && !(bitpos % BITS_PER_UNIT));

          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           BLOCK_OP_NORMAL);

          return const0_rtx;
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
        to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
        set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0);
    }
}
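/* Illustrative example, added for exposition and not part of the original
   source: assigning to a bit-field member, as in

     struct S { char c; int f : 13; };
     void set_f (struct S *p, int x) { p->f = x; }

   takes the bit-field branch above (MODE is VOIDmode because the field is
   a bit-field), so the value is expanded and written with store_bit_field
   rather than through an addressable memref built by adjust_address.  */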
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
                     HOST_WIDE_INT *pbitpos, tree *poffset,
                     enum machine_mode *pmode, int *punsignedp,
                     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
        mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
        *pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                   TREE_OPERAND (exp, 2));
          break;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);

            /* If this field hasn't been filled in yet, don't go past it.
               This should only happen when folding expressions made during
               type construction.  */
            if (this_offset == 0)
              break;

            offset = size_binop (PLUS_EXPR, offset, this_offset);
            bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                     DECL_FIELD_BIT_OFFSET (field));

            /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound = array_ref_low_bound (exp);
            tree unit_size = array_ref_element_size (exp);

            /* We assume all arrays have sizes that are a multiple of a byte.
               First subtract the lower bound, if any, in the type of the
               index, then convert to sizetype and multiply by the size of
               the array element.  */
            if (! integer_zerop (low_bound))
              index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                   index, low_bound);

            offset = size_binop (PLUS_EXPR, offset,
                                 size_binop (MULT_EXPR,
                                             convert (sizetype, index),
                                             unit_size));
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                   bitsize_int (*pbitsize));
          break;

        case VIEW_CONVERT_EXPR:
          if (keep_aligning && STRICT_ALIGNMENT
              && (TYPE_ALIGN (TREE_TYPE (exp))
                  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  < BIGGEST_ALIGNMENT)
              && (TYPE_ALIGN_OK (TREE_TYPE (exp))
                  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
            goto done;
          break;

        default:
          goto done;
        }

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
                                 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
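/* Illustrative example, added for exposition and not part of the original
   source: for

     struct S { int a; short b[10]; } *p;
     ... p->b[3] ...

   get_inner_reference returns the INDIRECT_REF *p as the containing
   object, with *PBITSIZE == 16, *PBITPOS == 32 + 3 * 16 == 80,
   *POFFSET == 0 and *PMODE == HImode, assuming a target with 32-bit int
   and 16-bit short.  */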
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
        aligned_size = fold_convert (sizetype, aligned_size);
      return size_binop (MULT_EXPR, aligned_size,
                         size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
        aligned_offset = fold_convert (sizetype, aligned_offset);
      return size_binop (MULT_EXPR, aligned_offset,
                         size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return 1;

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value = simplify_gen_subreg (GET_MODE (value),
                                   force_reg (GET_MODE (SUBREG_REG (value)),
                                              force_operand (SUBREG_REG (value),
                                                             NULL_RTX)),
                                   GET_MODE (SUBREG_REG (value)),
                                   SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
        subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
        subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
        {
          code = PLUS;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && REG_P (XEXP (XEXP (value, 0), 0))
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_simple_binop (GET_MODE (value), code,
                                          XEXP (XEXP (value, 0), 0), op2,
                                          subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_simple_binop (GET_MODE (value), code, temp,
                                      force_operand (XEXP (XEXP (value,
                                                                 0), 1), 0),
                                      target, 0, OPTAB_LIB_WIDEN);
        }

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
        {
        case MULT:
          return expand_mult (GET_MODE (value), op1, op2, target, 1);
        case DIV:
          if (!INTEGRAL_MODE_P (GET_MODE (value)))
            return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                        target, 1, OPTAB_LIB_WIDEN);
          else
            return expand_divmod (0,
                                  FLOAT_MODE_P (GET_MODE (value))
                                  ? RDIV_EXPR : TRUNC_DIV_EXPR,
                                  GET_MODE (value), op1, op2, target, 0);
        case MOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 0);
        case UDIV:
          return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case UMOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case ASHIFTRT:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 0, OPTAB_LIB_WIDEN);
        default:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 1, OPTAB_LIB_WIDEN);
        }
    }
  if (UNARY_P (value))
    {
      if (!target)
        target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
        {
        case ZERO_EXTEND:
        case SIGN_EXTEND:
        case TRUNCATE:
        case FLOAT_EXTEND:
        case FLOAT_TRUNCATE:
          convert_move (target, op1, code == ZERO_EXTEND);
          return target;

        case FIX:
        case UNSIGNED_FIX:
          expand_fix (target, op1, code == UNSIGNED_FIX);
          return target;

        case FLOAT:
        case UNSIGNED_FLOAT:
          expand_float (target, op1, code == UNSIGNED_FLOAT);
          return target;

        default:
          return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
        }
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
                             force_reg (GET_MODE (SUBREG_REG (value)),
                                        force_operand (SUBREG_REG (value),
                                                       NULL_RTX)),
                             GET_MODE (SUBREG_REG (value)),
                             SUBREG_BYTE (value));
#endif

  return value;
}
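/* Illustrative usage sketch, added for exposition and not part of the
   original source:

     rtx reg = gen_reg_rtx (Pmode);
     rtx sum = gen_rtx_PLUS (Pmode, reg, GEN_INT (8));
     rtx op = force_operand (sum, NULL_RTX);

   emits the addition and returns a pseudo (or an equivalent simple
   operand) holding reg + 8, usable wherever a REG, MEM or constant is
   required.  */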
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
                 != INTEGER_CST)
          && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
        {
          while (1)
            {
              if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
                return 0;
              exp = TREE_CHAIN (exp);
              if (!exp)
                return 1;
              if (TREE_CODE (exp) != TREE_LIST)
                return safe_from_p (x, exp, 0);
            }
        }
      else if (TREE_CODE (exp) == ERROR_MARK)
        return 1;	/* An already-visited SAVE_EXPR? */
      else
        return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
         DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
              || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
              || !DECL_INITIAL (DECL_EXPR_DECL (exp))
              || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
        return 0;

      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          /* If the operand is static or we are static, we can't conflict.
             Likewise if we don't conflict with the operand at all.  */
          if (staticp (TREE_OPERAND (exp, 0))
              || TREE_STATIC (exp)
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
            return 1;

          /* Otherwise, the only way this can conflict is if we are taking
             the address of a DECL whose address is part of X, which is
             very rare.  */
          exp = TREE_OPERAND (exp, 0);
          if (DECL_P (exp))
            {
              if (!DECL_RTL_SET_P (exp)
                  || !MEM_P (DECL_RTL (exp)))
                return 0;
              else
                exp_rtl = XEXP (DECL_RTL (exp), 0);
            }
          break;

        case MISALIGNED_INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          if (MEM_P (x)
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
                                        get_alias_set (exp)))
            return 0;
          break;

        case CALL_EXPR:
          /* Assume that the call will clobber all hard registers and
             all of memory.  */
          if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
              || MEM_P (x))
            return 0;
          break;

        case WITH_CLEANUP_EXPR:
        case CLEANUP_POINT_EXPR:
          /* Lowered by gimplify.c.  */
          gcc_unreachable ();

        case SAVE_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

        default:
          break;
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = TREE_CODE_LENGTH (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
          return 0;

      /* If this is a language-specific tree code, it may require
         special handling.  */
      if ((unsigned int) TREE_CODE (exp)
          >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
          && !lang_hooks.safe_from_p (x, exp))
        return 0;
      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (REG_P (exp_rtl)
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (MEM_P (x) && MEM_P (exp_rtl)
                    && true_dependence (exp_rtl, VOIDmode, x,
                                        rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
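/* Example of use (illustrative): expand_operands below asks
   safe_from_p (target, exp1, 1) before reusing TARGET for its first
   operand.  A zero answer merely forces a fresh temporary, so a
   conservative result costs only a missed optimization, never
   correctness.  */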
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
        return BIGGEST_ALIGNMENT;
      else
        {
          /* Note: tree_low_cst is intentionally not used here,
             we don't care about the upper bits.  */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;
          return c0 ? c0 : BIGGEST_ALIGNMENT;
        }
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
          && host_integerp (TREE_OPERAND (exp, 1), 1))
        {
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
          return MAX (1, c0 / c1);
        }
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
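/* Worked example (illustrative): for the tree (i * 4 + 8), the
   variable i falls into the default case and gets factor 1, the
   MULT_EXPR yields 1 * 4 = 4, and the PLUS_EXPR returns
   MIN (4, 8) = 4, so a MEM addressed by this expression may be
   marked as 4-byte aligned.  */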
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
  else
    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
  return MAX (factor, target_align);
}
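/* Illustration: for a COMPONENT_REF such as s.f, DECL_ALIGN_UNIT of
   the FIELD_DECL reflects the alignment the enclosing structure's
   layout actually guarantees for that member, which may differ from
   what the member's type alone would suggest.  */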
/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
        /* Should be ignored.  */;
      else if (lang_hooks.expand_decl (var))
        /* OK.  */;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
        expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
        rest_of_decl_compilation (var, 0, 0);
      else
        /* No expansion needed.  */
        gcc_assert (TREE_CODE (var) == TYPE_DECL
                    || TREE_CODE (var) == CONST_DECL
                    || TREE_CODE (var) == FUNCTION_DECL
                    || TREE_CODE (var) == LABEL_DECL);
    }
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
                 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
         temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
        exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
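/* Illustration: for x + x the operand_equal_p test lets us expand x
   once and reuse the rtx via copy_rtx.  When the language demands
   left-to-right evaluation (flag_evaluation_order, as e.g. the Java
   front end sets it) and EXP1 has side effects, EXP0 is wrapped in a
   SAVE_EXPR so its value is fixed before EXP1 runs.  */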
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
                         enum expand_modifier modifier)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      || CONSTANT_CLASS_P (exp))
    return XEXP (expand_expr_constant (exp, 0, modifier), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case CONST_DECL:
      /* Recurse and make the output_constant_def clause above handle this.  */
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
                                      tmode, modifier);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
         the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
         The expression is therefore always offset by the size of the
         scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
         expand_expr, as that can have various side effects; LABEL_DECLs for
         example, may not have their DECL_RTL set yet.  Assume language
         specific tree nodes can be expanded in some interesting way.  */
      if (DECL_P (exp)
          || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
        {
          result = expand_expr (exp, target, tmode,
                                modifier == EXPAND_INITIALIZER
                                ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

          /* If the DECL isn't in memory, then the DECL wasn't properly
             marked TREE_ADDRESSABLE, which will be either a front-end
             or a tree optimizer bug.  */
          gcc_assert (MEM_P (result));
          result = XEXP (result, 0);

          /* ??? Is this needed anymore?  */
          if (DECL_P (exp) && !TREE_USED (exp))
            {
              assemble_external (exp);
              TREE_USED (exp) = 1;
            }

          if (modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_CONST_ADDRESS)
            result = force_operand (result, target);
          return result;
        }

      /* Pass FALSE as the last argument to get_inner_reference although
         we are expanding to RTL.  The rationale is that we know how to
         handle "aligning nodes" here: we can just bypass them because
         they won't change the final object whose address will be returned
         (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
        result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);

      result = convert_memory_address (tmode, result);
      tmp = convert_memory_address (tmode, tmp);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
        result = gen_rtx_PLUS (tmode, result, tmp);
      else
        {
          subtarget = bitpos ? NULL_RTX : target;
          result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
                                        1, OPTAB_LIB_WIDEN);
        }
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
         of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
        result = force_operand (result, target);
    }

  return result;
}
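/* Illustration: for &a->b this function recurses through the
   COMPONENT_REF into the INDIRECT_REF *a; the INDIRECT_REF case above
   then simply expands the pointer a itself, and the offset/bitpos code
   at the end adds b's byte offset to that address.  */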
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
                       enum expand_modifier modifier)
{
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
                                    tmode, modifier);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address (tmode, result);

  return result;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
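/* For example, under EXPAND_SUM the address-like expression p + i * 4
   may come back as
        (plus:SI (reg:SI 58) (mult:SI (reg:SI 59) (const_int 4)))
   instead of a single pseudo holding the computed sum; callers must be
   prepared for such a nest.  (Illustrative RTL; register numbers and
   modes are invented.)  */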
static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
                               enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
                  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
        last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
            REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                REG_NOTES (insn));
        }
    }

  return ret;
}
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
                    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp, decl_rtl;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context, subexp0, subexp1;
  bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  if (lang_hooks.reduce_bit_field_operations
      && TREE_CODE (type) == INTEGER_TYPE
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
    {
      /* An operation in what may be a bit-field type needs the
         result to be reduced to the precision of the bit-field type,
         which is narrower than that of the type's mode.  */
      reduce_bit_field = true;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
    }

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                 || code == CONVERT_EXPR || code == COND_EXPR
                 || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (MEM_P (temp))
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == tcc_unary
          || code == COMPONENT_REF || code == INDIRECT_REF)
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
               || TREE_CODE_CLASS (code) == tcc_comparison
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if (code == BIT_FIELD_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);

        temp = label_rtx (exp);
        temp = gen_rtx_LABEL_REF (Pmode, temp);

        if (function != current_function_decl
            && function != 0)
          LABEL_REF_NONLOCAL_P (temp) = 1;

        temp = gen_rtx_MEM (FUNCTION_MODE, temp);
        return temp;
      }

    case SSA_NAME:
      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
                                 NULL);
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
      gcc_assert (decl_rtl);

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
         been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (!context
                  || context == current_function_decl
                  || TREE_STATIC (exp)
                  /* ??? C++ creates functions that are not TREE_STATIC.  */
                  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
        temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
        {
          if (alt_rtl)
            *alt_rtl = decl_rtl;
          decl_rtl = use_anchored_address (decl_rtl);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_SUM
              && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
                  || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
            temp = replace_equiv_address (decl_rtl,
                                          copy_rtx (XEXP (decl_rtl, 0)));
        }

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (REG_P (decl_rtl)
          && GET_MODE (decl_rtl) != DECL_MODE (exp))
        {
          enum machine_mode pmode;

          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
                                (TREE_CODE (exp) == RESULT_DECL
                                 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
          gcc_assert (GET_MODE (decl_rtl) == pmode);

          temp = gen_lowpart_SUBREG (mode, decl_rtl);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
          return temp;
        }

      return decl_rtl;
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
         which can result in (plus xx (const_int 0)), which can get
         simplified by validate_replace_rtx during virtual register
         instantiation, which can result in unrecognizable insns.
         Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
          && modifier != EXPAND_INITIALIZER)
        temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
          || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
        return const_vector_from_tree (exp);
      else
        return expand_expr (build_constructor_from_list
                            (TREE_TYPE (exp),
                             TREE_VECTOR_CST_ELTS (exp)),
                            ignore ? const0_rtx : target, tmode, modifier);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
        {
          enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
          rtx rtarg, itarg;

          rtarg = XEXP (original_target, 0);
          itarg = XEXP (original_target, 1);

          /* Move the real and imaginary parts separately.  */
          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

          if (op0 != rtarg)
            emit_move_insn (rtarg, op0);
          if (op1 != itarg)
            emit_move_insn (itarg, op1);

          return original_target;
        }

      /* ... fall through ...  */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (temp, 0))
              || flag_force_addr))
        return replace_equiv_address (temp,
                                      copy_rtx (XEXP (temp, 0)));
      return temp;
    case SAVE_EXPR:
      {
        tree val = TREE_OPERAND (exp, 0);
        rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

        if (!SAVE_EXPR_RESOLVED_P (exp))
          {
            /* We can indeed still hit this case, typically via builtin
               expanders calling save_expr immediately before expanding
               something.  Assume this means that we only have to deal
               with non-BLKmode values.  */
            gcc_assert (GET_MODE (ret) != BLKmode);

            val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
            DECL_ARTIFICIAL (val) = 1;
            DECL_IGNORED_P (val) = 1;
            TREE_OPERAND (exp, 0) = val;
            SAVE_EXPR_RESOLVED_P (exp) = 1;

            if (!CONSTANT_P (ret))
              ret = copy_to_reg (ret);
            SET_DECL_RTL (val, ret);
          }

        return ret;
      }

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
        expand_goto (TREE_OPERAND (exp, 0));
      else
        expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          unsigned HOST_WIDE_INT idx;
          tree value;

          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
            expand_expr (value, const0_rtx, VOIDmode, 0);

          return const0_rtx;
        }

      /* Try to avoid creating a temporary at all.  This is possible
         if all of the initializer is zero.
         FIXME: try to handle all [0..255] initializers we can handle
         with memset.  */
      else if (TREE_STATIC (exp)
               && !TREE_ADDRESSABLE (exp)
               && target != 0 && mode == BLKmode
               && all_zeros_p (exp))
        {
          clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
          return target;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.

         FIXME: Avoid trying to fill vector constructors piece-meal.
         Output them with output_constant_def below unless we're sure
         they're zeros.  This should go away when vector initializers
         are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
                        && (! MOVE_BY_PIECES_P
                            (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
                             TYPE_ALIGN (type)))
                        && ! mostly_zeros_p (exp))))
               || ((modifier == EXPAND_INITIALIZER
                    || modifier == EXPAND_CONST_ADDRESS)
                   && TREE_CONSTANT (exp)))
        {
          rtx constructor = expand_expr_constant (exp, 1, modifier);

          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM)
            constructor = validize_mem (constructor);

          return constructor;
        }
      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL
              || modifier == EXPAND_STACK_PARM)
            target
              = assign_temp (build_qualified_type (type,
                                                   (TYPE_QUALS (type)
                                                    | (TREE_READONLY (exp)
                                                       * TYPE_QUAL_CONST))),
                             0, TREE_ADDRESSABLE (exp), 1);

          store_constructor (exp, target, 0, int_expr_size (exp));
          return target;
        }
    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);

        if (modifier != EXPAND_WRITE)
          {
            tree t;

            t = fold_read_from_constant_string (exp);
            if (t)
              return expand_expr (t, target, tmode, modifier);
          }

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);

        if (code == ALIGN_INDIRECT_REF)
          {
            int align = TYPE_ALIGN_UNIT (type);
            op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
            op0 = memory_address (mode, op0);
          }

        temp = gen_rtx_MEM (mode, op0);

        set_mem_attributes (temp, exp, 0);

        /* Resolve the misalignment now, so that we don't have to remember
           to resolve it later.  Of course, this only works for reads.  */
        /* ??? When we get around to supporting writes, we'll have to handle
           this in store_expr directly.  The vectorizer isn't generating
           those yet, however.  */
        if (code == MISALIGNED_INDIRECT_REF)
          {
            int icode;
            rtx reg, insn;

            gcc_assert (modifier == EXPAND_NORMAL
                        || modifier == EXPAND_STACK_PARM);

            /* The vectorizer should have already checked the mode.  */
            icode = movmisalign_optab->handlers[mode].insn_code;
            gcc_assert (icode != CODE_FOR_nothing);

            /* We've already validated the memory, and we're creating a
               new pseudo destination.  The predicates really can't fail.  */
            reg = gen_reg_rtx (mode);

            /* Nor can the insn generator.  */
            insn = GEN_FCN (icode) (reg, temp);
            emit_insn (insn);

            return reg;
          }

        return temp;
      }
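/* Illustration: on a target that provides a movmisalign pattern for
   V4SFmode, a MISALIGNED_INDIRECT_REF of such a location expands to
   that insn loading into a fresh pseudo, instead of an ordinary
   (mem:V4SF ...) whose alignment the backend would otherwise be
   entitled to assume.  (Hypothetical target, for illustration.)  */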
    case TARGET_MEM_REF:
      {
        struct mem_address addr;

        get_address_description (exp, &addr);
        op0 = addr_for_mem_ref (&addr, true);
        op0 = memory_address (mode, op0);
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
        return temp;
      }
    case ARRAY_REF:

      {
        tree array = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY)
          {
            tree t = fold_read_from_constant_string (exp);

            if (t)
              return expand_expr (t, target, tmode, modifier);
          }

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY
            && TREE_CODE (array) == CONSTRUCTOR
            && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST)
          {
            unsigned HOST_WIDE_INT ix;
            tree field, value;

            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
                                      field, value)
              if (tree_int_cst_equal (field, index))
                {
                  if (!TREE_SIDE_EFFECTS (value))
                    return expand_expr (fold (value), target, tmode, modifier);
                  break;
                }
          }

        else if (optimize >= 1
                 && modifier != EXPAND_CONST_ADDRESS
                 && modifier != EXPAND_INITIALIZER
                 && modifier != EXPAND_MEMORY
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
                 && targetm.binds_local_p (array))
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    unsigned HOST_WIDE_INT ix;
                    tree field, value;

                    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
                                              field, value)
                      if (tree_int_cst_equal (field, index))
                        {
                          if (!TREE_SIDE_EFFECTS (value))
                            return expand_expr (fold (value), target, tmode,
                                                modifier);
                          break;
                        }
                  }
                else if (TREE_CODE (init) == STRING_CST)
                  {
                    tree index1 = index;
                    tree low_bound = array_ref_low_bound (exp);
                    index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));

                    /* Optimize the special case of a zero lower bound.

                       We convert the low_bound to sizetype to avoid some
                       problems with constant folding.  (E.g. suppose the
                       lower bound is 1 and its mode is QI.  Without the
                       conversion, (ARRAY + (INDEX - (unsigned char) 1))
                       becomes ((ARRAY + (-(unsigned char) 1)) + INDEX),
                       which becomes (ARRAY + 255 + INDEX).  Oops!)  */

                    if (! integer_zerop (low_bound))
                      index1 = size_diffop (index1, fold_convert (sizetype,
                                                                  low_bound));

                    if (0 > compare_tree_int (index1,
                                              TREE_STRING_LENGTH (init)))
                      {
                        tree type = TREE_TYPE (TREE_TYPE (init));
                        enum machine_mode mode = TYPE_MODE (type);

                        if (GET_MODE_CLASS (mode) == MODE_INT
                            && GET_MODE_SIZE (mode) == 1)
                          return gen_int_mode (TREE_STRING_POINTER (init)
                                               [TREE_INT_CST_LOW (index1)],
                                               mode);
                      }
                  }
              }
          }
      }
      goto normal_inner_ref;
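/* Illustration: given  static const char msg[] = "foo";  the read
   msg[2] satisfies the VAR_DECL arm above and folds at expand time to
   (const_int 111), i.e. 'o', with no memory reference emitted.  */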
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;

          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
                                    idx, field, value)
            if (field == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (field)
                    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (field))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                if (DECL_BIT_FIELD (field)
                    && modifier == EXPAND_STACK_PARM)
                  target = 0;
                op0 = expand_expr (value, target, tmode, modifier);
                if (DECL_BIT_FIELD (field))
                  {
                    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
                    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));

                    if (TYPE_UNSIGNED (TREE_TYPE (field)))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (imode, op0, op1, target);
                      }
                    else
                      {
                        tree count
                          = build_int_cst (NULL_TREE,
                                           GET_MODE_BITSIZE (imode) - bitsize);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      goto normal_inner_ref;
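/* Illustration: extracting a 5-bit unsigned bitfield from a
   CONSTRUCTOR masks the expanded value with GEN_INT (31); a signed
   field instead gets a left shift followed by an arithmetic right
   shift to reproduce the sign extension the constructor performs
   implicitly.  */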
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep, true);
        rtx orig_op0;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        gcc_assert (tem != exp);

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to be safe to use.  This occurs in unchecked conversion in Ada.  */

        orig_op0 = op0
          = expand_expr (tem,
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                              != INTEGER_CST)
                          && modifier != EXPAND_STACK_PARM
                          ? target : NULL_RTX),
                         VOIDmode,
                         (modifier == EXPAND_INITIALIZER
                          || modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_STACK_PARM)
                         ? modifier : EXPAND_NORMAL);

        /* If this is a constant, put it into a register if it is a legitimate
           constant, OFFSET is 0, and we won't try to extract outside the
           register (in case we were passed a partially uninitialized object
           or a view_conversion to a larger size).  Force the constant to
           memory otherwise.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
                && offset == 0
                && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        /* Otherwise, if this object is not in memory and we either have an
           offset, a BLKmode result, or a reference outside the object, put it
           there.  Such cases can occur in Ada if we have unchecked conversion
           of an expression from a scalar type to an array or record type or
           for an ARRAY_RANGE_REF whose type is BLKmode.  */
        else if (!MEM_P (op0)
                 && (offset != 0
                     || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
                     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
          {
            tree nt = build_qualified_type (TREE_TYPE (tem),
                                            (TYPE_QUALS (TREE_TYPE (tem))
                                             | TYPE_QUAL_CONST));
            rtx memloc = assign_temp (nt, 1, 1, 1);

            emit_move_insn (memloc, op0);
            op0 = memloc;
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
                                          EXPAND_SUM);

            gcc_assert (MEM_P (op0));

#ifdef POINTERS_EXTEND_UNSIGNED
            if (GET_MODE (offset_rtx) != Pmode)
              offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
            if (GET_MODE (offset_rtx) != ptr_mode)
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

            if (GET_MODE (op0) == BLKmode
                /* A constant address in OP0 can have VOIDmode, we must
                   not try to call force_reg in that case.  */
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
                && bitsize != 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
              {
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }

        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
           record its alignment as BIGGEST_ALIGNMENT.  */
        if (MEM_P (op0) && bitpos == 0 && offset != 0
            && is_aligning_offset (offset, tem))
          set_mem_align (op0, BIGGEST_ALIGNMENT);

        /* Don't forget about volatility even if this is a bitfield.  */
        if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }

        /* The following code doesn't handle CONCAT.
           Assume only bitpos == 0 can be used for CONCAT, due to
           one element arrays having the same mode as its element.  */
        if (GET_CODE (op0) == CONCAT)
          {
            gcc_assert (bitpos == 0
                        && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
            return op0;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || REG_P (op0) || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
                      || (MEM_P (op0)
                          && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
                              || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
                     && ((modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_INITIALIZER)
                         ? STRICT_ALIGNMENT
                         : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
                    || (bitpos % BITS_PER_UNIT != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (bitsize >= 0
                && TYPE_SIZE (TREE_TYPE (exp))
                && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                          bitsize)))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && MEM_P (op0)
                      && MEM_P (target)
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                if (bitsize == 0)
                  return target;

                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                gcc_assert (MEM_P (op0)
                            && (!target || MEM_P (target))
                            && !(bitpos % BITS_PER_UNIT));

                emit_block_move (target,
                                 adjust_address (op0, VOIDmode,
                                                 bitpos / BITS_PER_UNIT),
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

                return target;
              }

            op0 = validize_mem (op0);

            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
                                     (modifier == EXPAND_STACK_PARM
                                      ? NULL_RTX : target),
                                     ext_mode, ext_mode);

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);

            /* If the result type is BLKmode, store the data into a temporary
               of the appropriate type, but with the mode corresponding to the
               mode for the data we have (op0's mode).  It's tempting to make
               this a constant type, since we know it's only being stored once,
               but that can cause problems if we are taking the address of this
               COMPONENT_REF because the MEM of any reference via that address
               will have flags corresponding to the type, which will not
               necessarily be constant.  */
            if (mode == BLKmode)
              {
                rtx new
                  = assign_stack_temp_for_type
                    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                set_mem_attributes (op0, exp, 1);
              }

            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        set_mem_attributes (op0, exp, 0);
        if (REG_P (XEXP (op0, 0)))
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return lang_hooks.expand_expr (exp, original_target,
                                           tmode, modifier,
                                           alt_rtl);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attribute.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, exp, 0);
              return result;
            }

          if (target == 0)
            {
              if (TYPE_MODE (type) != BLKmode)
                target = gen_reg_rtx (TYPE_MODE (type));
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (MEM_P (target))
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        adjust_address (target, TYPE_MODE (valtype), 0),
                        modifier == EXPAND_STACK_PARM);

          else
            {
              gcc_assert (REG_P (target));

              /* Store this field into a union of the proper type.  */
              store_field (target,
                           MIN ((int_size_in_bytes (TREE_TYPE
                                                    (TREE_OPERAND (exp, 0)))
                                 * BITS_PER_UNIT),
                                (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                           0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                           type, 0);
            }

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return REDUCE_BIT_FIELD (op0);
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
        ;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
        {
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
          enum machine_mode inner_mode = TYPE_MODE (inner_type);

          if (modifier == EXPAND_INITIALIZER)
            op0 = simplify_gen_subreg (mode, op0, inner_mode,
                                       subreg_lowpart_offset (mode,
                                                              inner_mode));
          else
            op0 = convert_modes (mode, inner_mode, op0,
                                 TYPE_UNSIGNED (inner_type));
        }

      else if (modifier == EXPAND_INITIALIZER)
        op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
        op0 = convert_to_mode (mode, op0,
                               TYPE_UNSIGNED (TREE_TYPE
                                              (TREE_OPERAND (exp, 0))));
      else
        {
          convert_move (target, op0,
                        TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
          op0 = target;
        }

      return REDUCE_BIT_FIELD (op0);
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
        ;
      /* If neither mode is BLKmode, and both modes are the same size
         then we can use gen_lowpart.  */
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
               && GET_MODE_SIZE (TYPE_MODE (type))
                  == GET_MODE_SIZE (GET_MODE (op0)))
        {
          if (GET_CODE (op0) == SUBREG)
            op0 = force_reg (GET_MODE (op0), op0);
          op0 = gen_lowpart (TYPE_MODE (type), op0);
        }
      /* If both modes are integral, then we can convert from one to the
         other.  */
      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
               && SCALAR_INT_MODE_P (TYPE_MODE (type)))
        op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
                             TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      /* As a last resort, spill op0 to memory, and reload it in a
         different mode.  */
      else if (!MEM_P (op0))
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

          gcc_assert (!TREE_ADDRESSABLE (exp));

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* At this point, OP0 is in the correct mode.  If the output type is such
         that the operand is known to be aligned, indicate that it is.
         Otherwise, we need only be concerned about alignment for non-BLKmode
         results.  */
      if (MEM_P (op0))
        {
          op0 = copy_rtx (op0);

          if (TYPE_ALIGN_OK (type))
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              HOST_WIDE_INT temp_size
                = MAX (int_size_in_bytes (inner_type),
                       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
                                                    temp_size, 0, type);
              rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

              gcc_assert (!TREE_ADDRESSABLE (exp));

              if (GET_MODE (op0) == BLKmode)
                emit_block_move (new_with_op0_mode, op0,
                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
              else
                emit_move_insn (new_with_op0_mode, op0);

              op0 = new;
            }

          op0 = adjust_address (op0, TYPE_MODE (type), 0);
        }

      return op0;

    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
          && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (modifier == EXPAND_STACK_PARM)
            target = 0;
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return REDUCE_BIT_FIELD (op1);
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 (modifier == EXPAND_INITIALIZER
                                  ? EXPAND_INITIALIZER : EXPAND_SUM));
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Return a PLUS if modifier says it's OK.  */
                  if (modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    return simplify_gen_binary (PLUS, mode, op0, op1);
                  goto binop2;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return REDUCE_BIT_FIELD (op0);
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        {
          expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                           subtarget, &op0, &op1, 0);
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          goto binop2;
        }

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
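/* Illustration: with EXPAND_SUM, &arr[10] for an int array typically
   reaches the TREE_CONSTANT arm above as (symbol_ref arr) plus the
   constant 40, which plus_constant folds to
        (const (plus (symbol_ref ("arr")) (const_int 40)))
   with no add instruction emitted.  (Illustrative RTL.)  */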
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                           NULL_RTX, &op0, &op1, modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
          else
            return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
        {
          op1 = negate_rtx (mode, op1);
          return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
        }

      goto binop2;
7871 /* If first operand is constant, swap them.
7872 Thus the following special case checks need only
7873 check the second operand. */
7874 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7876 tree t1
= TREE_OPERAND (exp
, 0);
7877 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7878 TREE_OPERAND (exp
, 1) = t1
;
7881 /* Attempt to return something suitable for generating an
7882 indexed address, for machines that support that. */
7884 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7885 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7887 tree exp1
= TREE_OPERAND (exp
, 1);
7889 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7893 op0
= force_operand (op0
, NULL_RTX
);
7895 op0
= copy_to_mode_reg (mode
, op0
);
7897 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
7898 gen_int_mode (tree_low_cst (exp1
, 0),
7899 TYPE_MODE (TREE_TYPE (exp1
)))));
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */

      subexp0 = TREE_OPERAND (exp, 0);
      subexp1 = TREE_OPERAND (exp, 1);

      /* First, check if we have a multiplication of one signed and one
         unsigned operand.  */
      if (TREE_CODE (subexp0) == NOP_EXPR
          && TREE_CODE (subexp1) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
          && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
              != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
          this_optab = usmul_widen_optab;
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
                    expand_operands (TREE_OPERAND (subexp0, 0),
                                     TREE_OPERAND (subexp1, 0),
                                     NULL_RTX, &op0, &op1, 0);
                  else
                    expand_operands (TREE_OPERAND (subexp0, 0),
                                     TREE_OPERAND (subexp1, 0),
                                     NULL_RTX, &op1, &op0, 0);

                  goto binop3;
                }
            }
        }
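      /* For example, "(int) us * (int) ss", with us an unsigned short and
         ss a signed short, matches above: where usmul_widen_optab is
         available it multiplies the HImode operands directly into an
         SImode product, with the unsigned operand placed first.  */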
      /* Check for a multiplication with matching signedness.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE
                                   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   == TYPE_PRECISION (TREE_TYPE
                                      (TREE_OPERAND
                                       (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TYPE_UNSIGNED (TREE_TYPE
                                  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   == TYPE_UNSIGNED (TREE_TYPE
                                     (TREE_OPERAND
                                      (TREE_OPERAND (exp, 0), 0)))))))
        {
          tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
          enum machine_mode innermode = TYPE_MODE (op0type);
          bool zextend_p = TYPE_UNSIGNED (op0type);
          optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
          this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

          if (mode == GET_MODE_2XWIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1),
                                     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
                  else
                    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
                  goto binop3;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem, hipart;
                  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_normal (TREE_OPERAND (exp, 1)),
                                         unsignedp);
                  else
                    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  hipart = gen_highpart (innermode, temp);
                  htem = expand_mult_highpart_adjust (innermode, hipart,
                                                      op0, op1, hipart,
                                                      zextend_p);
                  if (htem != hipart)
                    emit_move_insn (hipart, htem);
                  return REDUCE_BIT_FIELD (temp);
                }
            }
        }
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      gcc_unreachable ();                       /* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;
    case FLOAT_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode,
                          optab_for_tree_code (NEGATE_EXPR, type),
                          op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
          || modifier == EXPAND_STACK_PARM
          || (MEM_P (target) && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (REG_P (target)
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (! REG_P (target))
        target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }

      /* We generate better code and avoid problems with op1 mentioning
         target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
        op1 = force_reg (mode, op1);

      {
        enum rtx_code comparison_code;
        rtx cmpop1 = op1;

        if (code == MAX_EXPR)
          comparison_code = unsignedp ? GEU : GE;
        else
          comparison_code = unsignedp ? LEU : LE;

        /* Canonicalize to comparisons against 0.  */
        if (op1 == const1_rtx)
          {
            /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
               or (a != 0 ? a : 1) for unsigned.
               For MIN we are safe converting (a <= 1 ? a : 1)
               into (a <= 0 ? a : 1)  */
            cmpop1 = const0_rtx;
            if (code == MAX_EXPR)
              comparison_code = unsignedp ? NE : GT;
          }
        if (op1 == constm1_rtx && !unsignedp)
          {
            /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
               and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
            cmpop1 = const0_rtx;
            if (code == MIN_EXPR)
              comparison_code = LT;
          }
#ifdef HAVE_conditional_move
        /* Use a conditional move if possible.  */
        if (can_conditionally_move_p (mode))
          {
            rtx insn;

            /* ??? Same problem as in expmed.c: emit_conditional_move
               forces a stack adjustment via compare_from_rtx, and we
               lose the stack adjustment if the sequence we are about
               to create is discarded.  */
            do_pending_stack_adjust ();

            start_sequence ();

            /* Try to emit the conditional move.  */
            insn = emit_conditional_move (target, comparison_code,
                                          op0, cmpop1, mode,
                                          op0, op1, mode,
                                          unsignedp);

            /* If we could do the conditional move, emit the sequence,
               and return.  */
            if (insn)
              {
                rtx seq = get_insns ();
                end_sequence ();
                emit_insn (seq);
                return target;
              }

            /* Otherwise discard the sequence and fall back to code with
               branches.  */
            end_sequence ();
          }
#endif
        if (target != op0)
          emit_move_insn (target, op0);

        temp = gen_label_rtx ();
        do_compare_rtx_and_jump (target, cmpop1, comparison_code,
                                 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
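      /* The branch fallback above amounts to, e.g. for a signed MAX_EXPR:

             target = op0;
             if (target >= op1) goto done;
             target = op1;
           done:

         (illustrative pseudo-C; the actual test is the canonicalized
         comparison_code against cmpop1).  */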
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                            tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && REG_P (original_target)
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          /* If temp is constant, we can just compute the result.  */
          if (GET_CODE (temp) == CONST_INT)
            {
              if (INTVAL (temp) != 0)
                emit_move_insn (target, const1_rtx);
              else
                emit_move_insn (target, const0_rtx);

              return target;
            }

          if (temp != original_target)
            {
              enum machine_mode mode1 = GET_MODE (temp);
              if (mode1 == VOIDmode)
                mode1 = tmode != VOIDmode ? tmode : mode;

              temp = copy_to_mode_reg (mode1, temp);
            }

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional store
         into a temporary variable.  Drop through and handle this
         like && and ||.  */

      if (! ignore
          && (target == 0
              || modifier == EXPAND_STACK_PARM
              || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && REG_P (target)
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
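      /* The drop-through code above is the generic set/jump/set idiom:

             target = 0;
             if (!cond) goto L;
             target = 1;
           L:

         used whenever no store-flag instruction applies.  */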
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;
    case STATEMENT_LIST:
      {
        tree_stmt_iterator iter;

        gcc_assert (ignore);

        for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
          expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
         conditional jump and is handled in
         expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
         are required to be constructed to contain assignments of
         a temporary variable, so that we can evaluate them here
         for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
                  && !ignore
                  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
                  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
         if a target was specified use it; it will not be used as an
         intermediate target unless it is safe.  If no target, use a
         temporary and compare it against the expected values.  */

      if (modifier != EXPAND_STACK_PARM
          && original_target
          && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
          && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
          && (! can_conditionally_move_p (mode)
              || REG_P (original_target))
#endif
          && !MEM_P (original_target))
        temp = original_target;
      else
        temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
                  modifier == EXPAND_STACK_PARM);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
                  modifier == EXPAND_STACK_PARM);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;
    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;
    case MODIFY_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);

        gcc_assert (ignore);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)));
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        expand_assignment (lhs, rhs);

        return const0_rtx;
      }
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
        expand_null_return ();
      else
        expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      op1 = expand_normal (TREE_OPERAND (exp, 1));

      if (!target)
        target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
    case REALPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, true);
    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;
    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);
    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
         initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
         have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
                               modifier, alt_rtl);
    case REALIGN_LOAD_EXPR:
      {
        tree oprnd0 = TREE_OPERAND (exp, 0);
        tree oprnd1 = TREE_OPERAND (exp, 1);
        tree oprnd2 = TREE_OPERAND (exp, 2);
        rtx op2;

        this_optab = optab_for_tree_code (code, type);
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
                                  target, unsignedp);
        gcc_assert (temp);
        return temp;
      }
    case DOT_PROD_EXPR:
      {
        tree oprnd0 = TREE_OPERAND (exp, 0);
        tree oprnd1 = TREE_OPERAND (exp, 1);
        tree oprnd2 = TREE_OPERAND (exp, 2);
        rtx op2;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        target = expand_widen_pattern_expr (exp, op0, op1, op2,
                                            target, unsignedp);
        return target;
      }
    case WIDEN_SUM_EXPR:
      {
        tree oprnd0 = TREE_OPERAND (exp, 0);
        tree oprnd1 = TREE_OPERAND (exp, 1);

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
        target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
                                            target, unsignedp);
        return target;
      }
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
        op0 = expand_normal (TREE_OPERAND (exp, 0));
        this_optab = optab_for_tree_code (code, type);
        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
        gcc_assert (temp);
        return temp;
      }
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
        target = expand_vec_shift_expr (exp, target);
        return target;
      }
    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
                                     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
        mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
                                   GET_MODE (exp));
      else
        mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
                                   ((unsigned HOST_WIDE_INT) 1
                                    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
                                   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
                                  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
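/* For example, reducing a 32-bit value to a 5-bit unsigned bit-field
   ANDs it with the mask 0x1f; for a 5-bit signed field it instead
   shifts left by 27 and then arithmetic-shifts right by 27 so the
   field's sign bit is replicated (illustrative, assuming SImode).  */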
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static bool
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
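/* For example, an offset written in C as "(-(intptr_t) &exp) & (align - 1)"
   matches the NEGATE-of-ADDR inside BIT_AND pattern checked above
   (illustrative; the actual trees use ADDR_EXPR, NEGATE_EXPR and
   BIT_AND_EXPR with a mask one less than a power of two).  */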
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
        {
          *ptr_offset = size_zero_node;
          return TREE_OPERAND (arg, 0);
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
        {
          array = TREE_OPERAND (arg, 0);
          offset = size_zero_node;
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
            return 0;
        }
      else
        return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg0, 0);
          offset = arg1;
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg1, 0);
          offset = arg0;
        }
      else
        return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
          || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
        return 0;

      /* If they are read-only, non-volatile and bind locally.  */
      if (! TREE_READONLY (array)
          || TREE_SIDE_EFFECTS (array)
          || ! targetm.binds_local_p (array))
        return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
          || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
        return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
         and inside of the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
          && (! host_integerp (offset, 1)
              || compare_tree_int (offset, length) >= 0))
        return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
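/* For example, for ARG of the form &"hello"[2] or "hello" + 2 this
   returns the STRING_CST for "hello" and sets *PTR_OFFSET to 2; builtins
   such as strlen rely on this to fold string accesses at compile time.  */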
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }
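  /* For example, "(x & 8) != 0" becomes "(x >> 3) & 1", and
     "(x & 8) == 0" becomes "((x >> 3) & 1) ^ 1", avoiding an scc insn.  */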
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (lang_hooks.types.type_for_size
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
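  /* This is the classic single-compare range check: for case values in
     [lo, hi], with lo already subtracted, "(unsigned) (i - lo) > hi - lo"
     rejects both i < lo (which wraps to a huge unsigned value) and
     i > hi with one unsigned comparison.  */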
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);

  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
#include "gt-expr.h"