1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
76 #define STACK_PUSH_CODE PRE_INC
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
89 /* This structure is used by move_by_pieces to describe the move to
100 int explicit_inc_from
;
101 unsigned HOST_WIDE_INT len
;
102 HOST_WIDE_INT offset
;
106 /* This structure is used by store_by_pieces to describe the clear to
109 struct store_by_pieces
115 unsigned HOST_WIDE_INT len
;
116 HOST_WIDE_INT offset
;
117 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
);
122 static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT
,
125 static void move_by_pieces_1 (rtx (*) (rtx
, ...), enum machine_mode
,
126 struct move_by_pieces
*);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx
, rtx
, rtx
, unsigned);
129 static rtx
emit_block_move_via_libcall (rtx
, rtx
, rtx
, bool);
130 static tree
emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
132 static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT
, enum machine_mode
);
133 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces
*, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx
, ...), enum machine_mode
,
136 struct store_by_pieces
*);
137 static bool clear_storage_via_clrmem (rtx
, rtx
, unsigned);
138 static rtx
clear_storage_via_libcall (rtx
, rtx
, bool);
139 static tree
clear_storage_libcall_fn (int);
140 static rtx
compress_float_constant (rtx
, rtx
);
141 static rtx
get_subtarget (rtx
);
142 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
143 HOST_WIDE_INT
, enum machine_mode
,
144 tree
, tree
, int, int);
145 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
);
146 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
, enum machine_mode
,
149 static unsigned HOST_WIDE_INT
highest_pow2_factor (tree
);
150 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree
, tree
);
152 static int is_aligning_offset (tree
, tree
);
153 static void expand_operands (tree
, tree
, rtx
, rtx
*, rtx
*,
154 enum expand_modifier
);
155 static rtx
reduce_to_bit_field_precision (rtx
, rtx
, tree
);
156 static rtx
do_store_flag (tree
, rtx
, enum machine_mode
, int);
158 static void emit_single_push_insn (enum machine_mode
, rtx
, tree
);
160 static void do_tablejump (rtx
, enum machine_mode
, rtx
, rtx
, rtx
);
161 static rtx
const_vector_from_tree (tree
);
162 static void write_complex_part (rtx
, rtx
, bool);
164 /* Record for each mode whether we can move a register directly to or
165 from an object of that mode in memory. If we can't, we won't try
166 to use that mode directly when accessing a field of that mode. */
168 static char direct_load
[NUM_MACHINE_MODES
];
169 static char direct_store
[NUM_MACHINE_MODES
];
171 /* Record for each mode whether we can float-extend from memory. */
173 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
175 /* This macro is used to determine whether move_by_pieces should be called
176 to perform a structure copy. */
177 #ifndef MOVE_BY_PIECES_P
178 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
180 < (unsigned int) MOVE_RATIO)
183 /* This macro is used to determine whether clear_by_pieces should be
184 called to clear storage. */
185 #ifndef CLEAR_BY_PIECES_P
186 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188 < (unsigned int) CLEAR_RATIO)
191 /* This macro is used to determine whether store_by_pieces should be
192 called to "memset" storage with byte values other than zero, or
193 to "memcpy" storage when the source is a constant string. */
194 #ifndef STORE_BY_PIECES_P
195 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
196 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
197 < (unsigned int) MOVE_RATIO)
200 /* This array records the insn_code of insns to perform block moves. */
201 enum insn_code movmem_optab
[NUM_MACHINE_MODES
];
203 /* This array records the insn_code of insns to perform block clears. */
204 enum insn_code clrmem_optab
[NUM_MACHINE_MODES
];
206 /* These arrays record the insn_code of two different kinds of insns
207 to perform block compares. */
208 enum insn_code cmpstr_optab
[NUM_MACHINE_MODES
];
209 enum insn_code cmpmem_optab
[NUM_MACHINE_MODES
];
211 /* Synchronization primitives. */
212 enum insn_code sync_add_optab
[NUM_MACHINE_MODES
];
213 enum insn_code sync_sub_optab
[NUM_MACHINE_MODES
];
214 enum insn_code sync_ior_optab
[NUM_MACHINE_MODES
];
215 enum insn_code sync_and_optab
[NUM_MACHINE_MODES
];
216 enum insn_code sync_xor_optab
[NUM_MACHINE_MODES
];
217 enum insn_code sync_nand_optab
[NUM_MACHINE_MODES
];
218 enum insn_code sync_old_add_optab
[NUM_MACHINE_MODES
];
219 enum insn_code sync_old_sub_optab
[NUM_MACHINE_MODES
];
220 enum insn_code sync_old_ior_optab
[NUM_MACHINE_MODES
];
221 enum insn_code sync_old_and_optab
[NUM_MACHINE_MODES
];
222 enum insn_code sync_old_xor_optab
[NUM_MACHINE_MODES
];
223 enum insn_code sync_old_nand_optab
[NUM_MACHINE_MODES
];
224 enum insn_code sync_new_add_optab
[NUM_MACHINE_MODES
];
225 enum insn_code sync_new_sub_optab
[NUM_MACHINE_MODES
];
226 enum insn_code sync_new_ior_optab
[NUM_MACHINE_MODES
];
227 enum insn_code sync_new_and_optab
[NUM_MACHINE_MODES
];
228 enum insn_code sync_new_xor_optab
[NUM_MACHINE_MODES
];
229 enum insn_code sync_new_nand_optab
[NUM_MACHINE_MODES
];
230 enum insn_code sync_compare_and_swap
[NUM_MACHINE_MODES
];
231 enum insn_code sync_compare_and_swap_cc
[NUM_MACHINE_MODES
];
232 enum insn_code sync_lock_test_and_set
[NUM_MACHINE_MODES
];
233 enum insn_code sync_lock_release
[NUM_MACHINE_MODES
];
235 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
237 #ifndef SLOW_UNALIGNED_ACCESS
238 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
241 /* This is run once per compilation to set up which modes can be used
242 directly in memory and to initialize the block move optab. */
245 init_expr_once (void)
248 enum machine_mode mode
;
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
256 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
257 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
259 /* A scratch register we can modify in-place below to avoid
260 useless RTL allocations. */
261 reg
= gen_rtx_REG (VOIDmode
, -1);
263 insn
= rtx_alloc (INSN
);
264 pat
= gen_rtx_SET (0, NULL_RTX
, NULL_RTX
);
265 PATTERN (insn
) = pat
;
267 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
268 mode
= (enum machine_mode
) ((int) mode
+ 1))
272 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
273 PUT_MODE (mem
, mode
);
274 PUT_MODE (mem1
, mode
);
275 PUT_MODE (reg
, mode
);
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
280 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
281 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
282 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
285 if (! HARD_REGNO_MODE_OK (regno
, mode
))
291 SET_DEST (pat
) = reg
;
292 if (recog (pat
, insn
, &num_clobbers
) >= 0)
293 direct_load
[(int) mode
] = 1;
295 SET_SRC (pat
) = mem1
;
296 SET_DEST (pat
) = reg
;
297 if (recog (pat
, insn
, &num_clobbers
) >= 0)
298 direct_load
[(int) mode
] = 1;
301 SET_DEST (pat
) = mem
;
302 if (recog (pat
, insn
, &num_clobbers
) >= 0)
303 direct_store
[(int) mode
] = 1;
306 SET_DEST (pat
) = mem1
;
307 if (recog (pat
, insn
, &num_clobbers
) >= 0)
308 direct_store
[(int) mode
] = 1;
312 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
314 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
315 mode
= GET_MODE_WIDER_MODE (mode
))
317 enum machine_mode srcmode
;
318 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
319 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
323 ic
= can_extend_p (mode
, srcmode
, 0);
324 if (ic
== CODE_FOR_nothing
)
327 PUT_MODE (mem
, srcmode
);
329 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
330 float_extend_from_mem
[mode
][srcmode
] = true;
335 /* This is run at the start of compiling a function. */
340 cfun
->expr
= ggc_alloc_cleared (sizeof (struct expr_status
));
343 /* Copy data from FROM to TO, where the machine modes are not the same.
344 Both modes may be integer, or both may be floating.
345 UNSIGNEDP should be nonzero if FROM is an unsigned type.
346 This causes zero-extension instead of sign-extension. */
349 convert_move (rtx to
, rtx from
, int unsignedp
)
351 enum machine_mode to_mode
= GET_MODE (to
);
352 enum machine_mode from_mode
= GET_MODE (from
);
353 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
354 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
358 /* rtx code for making an equivalent value. */
359 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
360 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
363 gcc_assert (to_real
== from_real
);
365 /* If the source and destination are already the same, then there's
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
374 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
376 >= GET_MODE_SIZE (to_mode
))
377 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
378 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
380 gcc_assert (GET_CODE (to
) != SUBREG
|| !SUBREG_PROMOTED_VAR_P (to
));
382 if (to_mode
== from_mode
383 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
385 emit_move_insn (to
, from
);
389 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
391 gcc_assert (GET_MODE_BITSIZE (from_mode
) == GET_MODE_BITSIZE (to_mode
));
393 if (VECTOR_MODE_P (to_mode
))
394 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
396 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
398 emit_move_insn (to
, from
);
402 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
404 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
405 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
414 gcc_assert (GET_MODE_PRECISION (from_mode
)
415 != GET_MODE_PRECISION (to_mode
));
417 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
422 /* Try converting directly if the insn is supported. */
424 code
= tab
->handlers
[to_mode
][from_mode
].insn_code
;
425 if (code
!= CODE_FOR_nothing
)
427 emit_unop_insn (code
, to
, from
,
428 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
432 /* Otherwise use a libcall. */
433 libcall
= tab
->handlers
[to_mode
][from_mode
].libfunc
;
435 /* Is this conversion implemented yet? */
436 gcc_assert (libcall
);
439 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
441 insns
= get_insns ();
443 emit_libcall_block (insns
, to
, value
,
444 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
446 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
450 /* Handle pointer conversion. */ /* SPEE 900220. */
451 /* Targets are expected to provide conversion insns between PxImode and
452 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
453 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
455 enum machine_mode full_mode
456 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
458 gcc_assert (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
459 != CODE_FOR_nothing
);
461 if (full_mode
!= from_mode
)
462 from
= convert_to_mode (full_mode
, from
, unsignedp
);
463 emit_unop_insn (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
,
467 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
470 enum machine_mode full_mode
471 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
473 gcc_assert (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
474 != CODE_FOR_nothing
);
476 if (to_mode
== full_mode
)
478 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
483 new_from
= gen_reg_rtx (full_mode
);
484 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
485 new_from
, from
, UNKNOWN
);
487 /* else proceed to integer conversions below. */
488 from_mode
= full_mode
;
492 /* Now both modes are integers. */
494 /* Handle expanding beyond a word. */
495 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
496 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
503 enum machine_mode lowpart_mode
;
504 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
506 /* Try converting directly if the insn is supported. */
507 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
510 /* If FROM is a SUBREG, put it into a register. Do this
511 so that we always generate the same set of insns for
512 better cse'ing; if an intermediate assignment occurred,
513 we won't be doing the operation directly on the SUBREG. */
514 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
515 from
= force_reg (from_mode
, from
);
516 emit_unop_insn (code
, to
, from
, equiv_code
);
519 /* Next, try converting via full word. */
520 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
521 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
522 != CODE_FOR_nothing
))
526 if (reg_overlap_mentioned_p (to
, from
))
527 from
= force_reg (from_mode
, from
);
528 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
530 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
531 emit_unop_insn (code
, to
,
532 gen_lowpart (word_mode
, to
), equiv_code
);
536 /* No special multiword conversion insn; do it by hand. */
539 /* Since we will turn this into a no conflict block, we must ensure
540 that the source does not overlap the target. */
542 if (reg_overlap_mentioned_p (to
, from
))
543 from
= force_reg (from_mode
, from
);
545 /* Get a copy of FROM widened to a word, if necessary. */
546 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
547 lowpart_mode
= word_mode
;
549 lowpart_mode
= from_mode
;
551 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
553 lowpart
= gen_lowpart (lowpart_mode
, to
);
554 emit_move_insn (lowpart
, lowfrom
);
556 /* Compute the value to put in each remaining word. */
558 fill_value
= const0_rtx
;
563 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
564 && STORE_FLAG_VALUE
== -1)
566 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
568 fill_value
= gen_reg_rtx (word_mode
);
569 emit_insn (gen_slt (fill_value
));
575 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
576 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
578 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
582 /* Fill the remaining words. */
583 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
585 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
586 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
588 gcc_assert (subword
);
590 if (fill_value
!= subword
)
591 emit_move_insn (subword
, fill_value
);
594 insns
= get_insns ();
597 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
598 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
602 /* Truncating multi-word to a word or less. */
603 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
604 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
607 && ! MEM_VOLATILE_P (from
)
608 && direct_load
[(int) to_mode
]
609 && ! mode_dependent_address_p (XEXP (from
, 0)))
611 || GET_CODE (from
) == SUBREG
))
612 from
= force_reg (from_mode
, from
);
613 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
617 /* Now follow all the conversions between integers
618 no more than a word long. */
620 /* For truncation, usually we can just refer to FROM in a narrower mode. */
621 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
622 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
623 GET_MODE_BITSIZE (from_mode
)))
626 && ! MEM_VOLATILE_P (from
)
627 && direct_load
[(int) to_mode
]
628 && ! mode_dependent_address_p (XEXP (from
, 0)))
630 || GET_CODE (from
) == SUBREG
))
631 from
= force_reg (from_mode
, from
);
632 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
633 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
634 from
= copy_to_reg (from
);
635 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
639 /* Handle extension. */
640 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
642 /* Convert directly if that works. */
643 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
647 from
= force_not_mem (from
);
649 emit_unop_insn (code
, to
, from
, equiv_code
);
654 enum machine_mode intermediate
;
658 /* Search for a mode to convert via. */
659 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
660 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
661 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
663 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
664 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
665 GET_MODE_BITSIZE (intermediate
))))
666 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
667 != CODE_FOR_nothing
))
669 convert_move (to
, convert_to_mode (intermediate
, from
,
670 unsignedp
), unsignedp
);
674 /* No suitable intermediate mode.
675 Generate what we need with shifts. */
676 shift_amount
= build_int_cst (NULL_TREE
,
677 GET_MODE_BITSIZE (to_mode
)
678 - GET_MODE_BITSIZE (from_mode
));
679 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
680 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
682 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
685 emit_move_insn (to
, tmp
);
690 /* Support special truncate insns for certain modes. */
691 if (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
!= CODE_FOR_nothing
)
693 emit_unop_insn (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
,
698 /* Handle truncation of volatile memrefs, and so on;
699 the things that couldn't be truncated directly,
700 and for which there was no special instruction.
702 ??? Code above formerly short-circuited this, for most integer
703 mode pairs, with a force_reg in from_mode followed by a recursive
704 call to this routine. Appears always to have been wrong. */
705 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
707 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
708 emit_move_insn (to
, temp
);
712 /* Mode combination is not recognized. */
716 /* Return an rtx for a value that would result
717 from converting X to mode MODE.
718 Both X and MODE may be floating, or both integer.
719 UNSIGNEDP is nonzero if X is an unsigned value.
720 This can be done by referring to a part of X in place
721 or by copying to a new temporary with conversion. */
724 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
726 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
729 /* Return an rtx for a value that would result
730 from converting X from mode OLDMODE to mode MODE.
731 Both modes may be floating, or both integer.
732 UNSIGNEDP is nonzero if X is an unsigned value.
734 This can be done by referring to a part of X in place
735 or by copying to a new temporary with conversion.
737 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
740 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
744 /* If FROM is a SUBREG that indicates that we have already done at least
745 the required extension, strip it. */
747 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
748 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
749 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
750 x
= gen_lowpart (mode
, x
);
752 if (GET_MODE (x
) != VOIDmode
)
753 oldmode
= GET_MODE (x
);
758 /* There is one case that we must handle specially: If we are converting
759 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
760 we are to interpret the constant as unsigned, gen_lowpart will do
761 the wrong if the constant appears negative. What we want to do is
762 make the high-order word of the constant zero, not all ones. */
764 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
765 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
766 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
768 HOST_WIDE_INT val
= INTVAL (x
);
770 if (oldmode
!= VOIDmode
771 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
773 int width
= GET_MODE_BITSIZE (oldmode
);
775 /* We need to zero extend VAL. */
776 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
779 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
782 /* We can do this with a gen_lowpart if both desired and current modes
783 are integer, and this is either a constant integer, a register, or a
784 non-volatile MEM. Except for the constant case where MODE is no
785 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
787 if ((GET_CODE (x
) == CONST_INT
788 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
789 || (GET_MODE_CLASS (mode
) == MODE_INT
790 && GET_MODE_CLASS (oldmode
) == MODE_INT
791 && (GET_CODE (x
) == CONST_DOUBLE
792 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
793 && ((MEM_P (x
) && ! MEM_VOLATILE_P (x
)
794 && direct_load
[(int) mode
])
796 && (! HARD_REGISTER_P (x
)
797 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
798 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
799 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
801 /* ?? If we don't know OLDMODE, we have to assume here that
802 X does not need sign- or zero-extension. This may not be
803 the case, but it's the best we can do. */
804 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
805 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
807 HOST_WIDE_INT val
= INTVAL (x
);
808 int width
= GET_MODE_BITSIZE (oldmode
);
810 /* We must sign or zero-extend in this case. Start by
811 zero-extending, then sign extend if we need to. */
812 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
814 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
815 val
|= (HOST_WIDE_INT
) (-1) << width
;
817 return gen_int_mode (val
, mode
);
820 return gen_lowpart (mode
, x
);
823 /* Converting from integer constant into mode is always equivalent to an
825 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
827 gcc_assert (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (oldmode
));
828 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
831 temp
= gen_reg_rtx (mode
);
832 convert_move (temp
, x
, unsignedp
);
836 /* STORE_MAX_PIECES is the number of bytes at a time that we can
837 store efficiently. Due to internal GCC limitations, this is
838 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
839 for an immediate constant. */
841 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
843 /* Determine whether the LEN bytes can be moved by using several move
844 instructions. Return nonzero if a call to move_by_pieces should
848 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
849 unsigned int align ATTRIBUTE_UNUSED
)
851 return MOVE_BY_PIECES_P (len
, align
);
854 /* Generate several move instructions to copy LEN bytes from block FROM to
855 block TO. (These are MEM rtx's with BLKmode).
857 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
858 used to push FROM to the stack.
860 ALIGN is maximum stack alignment we can assume.
862 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
863 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
867 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
868 unsigned int align
, int endp
)
870 struct move_by_pieces data
;
871 rtx to_addr
, from_addr
= XEXP (from
, 0);
872 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
873 enum machine_mode mode
= VOIDmode
, tmode
;
874 enum insn_code icode
;
876 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
879 data
.from_addr
= from_addr
;
882 to_addr
= XEXP (to
, 0);
885 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
886 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
888 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
895 #ifdef STACK_GROWS_DOWNWARD
901 data
.to_addr
= to_addr
;
904 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
905 || GET_CODE (from_addr
) == POST_INC
906 || GET_CODE (from_addr
) == POST_DEC
);
908 data
.explicit_inc_from
= 0;
909 data
.explicit_inc_to
= 0;
910 if (data
.reverse
) data
.offset
= len
;
913 /* If copying requires more than two move insns,
914 copy addresses to registers (to make displacements shorter)
915 and use post-increment if available. */
916 if (!(data
.autinc_from
&& data
.autinc_to
)
917 && move_by_pieces_ninsns (len
, align
, max_size
) > 2)
919 /* Find the mode of the largest move... */
920 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
921 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
922 if (GET_MODE_SIZE (tmode
) < max_size
)
925 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
927 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
928 data
.autinc_from
= 1;
929 data
.explicit_inc_from
= -1;
931 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
933 data
.from_addr
= copy_addr_to_reg (from_addr
);
934 data
.autinc_from
= 1;
935 data
.explicit_inc_from
= 1;
937 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
938 data
.from_addr
= copy_addr_to_reg (from_addr
);
939 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
941 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
943 data
.explicit_inc_to
= -1;
945 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
947 data
.to_addr
= copy_addr_to_reg (to_addr
);
949 data
.explicit_inc_to
= 1;
951 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
952 data
.to_addr
= copy_addr_to_reg (to_addr
);
955 tmode
= mode_for_size (MOVE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
956 if (align
>= GET_MODE_ALIGNMENT (tmode
))
957 align
= GET_MODE_ALIGNMENT (tmode
);
960 enum machine_mode xmode
;
962 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
964 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
965 if (GET_MODE_SIZE (tmode
) > MOVE_MAX_PIECES
966 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
969 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
972 /* First move what we can in the largest integer mode, then go to
973 successively smaller modes. */
977 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
978 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
979 if (GET_MODE_SIZE (tmode
) < max_size
)
982 if (mode
== VOIDmode
)
985 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
986 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
987 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
989 max_size
= GET_MODE_SIZE (mode
);
992 /* The code above should have handled everything. */
993 gcc_assert (!data
.len
);
999 gcc_assert (!data
.reverse
);
1004 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
1005 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
1007 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
1010 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1017 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
1025 /* Return number of insns required to move L bytes by pieces.
1026 ALIGN (in bits) is maximum alignment we can assume. */
1028 static unsigned HOST_WIDE_INT
1029 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
,
1030 unsigned int max_size
)
1032 unsigned HOST_WIDE_INT n_insns
= 0;
1033 enum machine_mode tmode
;
1035 tmode
= mode_for_size (MOVE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
1036 if (align
>= GET_MODE_ALIGNMENT (tmode
))
1037 align
= GET_MODE_ALIGNMENT (tmode
);
1040 enum machine_mode tmode
, xmode
;
1042 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
1044 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
1045 if (GET_MODE_SIZE (tmode
) > MOVE_MAX_PIECES
1046 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
1049 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
1052 while (max_size
> 1)
1054 enum machine_mode mode
= VOIDmode
;
1055 enum insn_code icode
;
1057 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1058 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1059 if (GET_MODE_SIZE (tmode
) < max_size
)
1062 if (mode
== VOIDmode
)
1065 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1066 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1067 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1069 max_size
= GET_MODE_SIZE (mode
);
1076 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1077 with move instructions for mode MODE. GENFUN is the gen_... function
1078 to make a move insn for that mode. DATA has all the other info. */
1081 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1082 struct move_by_pieces
*data
)
1084 unsigned int size
= GET_MODE_SIZE (mode
);
1085 rtx to1
= NULL_RTX
, from1
;
1087 while (data
->len
>= size
)
1090 data
->offset
-= size
;
1094 if (data
->autinc_to
)
1095 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1098 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1101 if (data
->autinc_from
)
1102 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1105 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1107 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1108 emit_insn (gen_add2_insn (data
->to_addr
,
1109 GEN_INT (-(HOST_WIDE_INT
)size
)));
1110 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1111 emit_insn (gen_add2_insn (data
->from_addr
,
1112 GEN_INT (-(HOST_WIDE_INT
)size
)));
1115 emit_insn ((*genfun
) (to1
, from1
));
1118 #ifdef PUSH_ROUNDING
1119 emit_single_push_insn (mode
, from1
, NULL
);
1125 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1126 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1127 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1128 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1130 if (! data
->reverse
)
1131 data
->offset
+= size
;
1137 /* Emit code to move a block Y to a block X. This may be done with
1138 string-move instructions, with multiple scalar move instructions,
1139 or with a library call.
1141 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1142 SIZE is an rtx that says how long they are.
1143 ALIGN is the maximum alignment we can assume they have.
1144 METHOD describes what kind of copy this is, and what mechanisms may be used.
1146 Return the address of the new block, if memcpy is called and returns it,
1150 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1158 case BLOCK_OP_NORMAL
:
1159 case BLOCK_OP_TAILCALL
:
1160 may_use_call
= true;
1163 case BLOCK_OP_CALL_PARM
:
1164 may_use_call
= block_move_libcall_safe_for_call_parm ();
1166 /* Make inhibit_defer_pop nonzero around the library call
1167 to force it to pop the arguments right away. */
1171 case BLOCK_OP_NO_LIBCALL
:
1172 may_use_call
= false;
1179 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1181 gcc_assert (MEM_P (x
));
1182 gcc_assert (MEM_P (y
));
1185 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1186 block copy is more efficient for other large modes, e.g. DCmode. */
1187 x
= adjust_address (x
, BLKmode
, 0);
1188 y
= adjust_address (y
, BLKmode
, 0);
1190 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1191 can be incorrect is coming from __builtin_memcpy. */
1192 if (GET_CODE (size
) == CONST_INT
)
1194 if (INTVAL (size
) == 0)
1197 x
= shallow_copy_rtx (x
);
1198 y
= shallow_copy_rtx (y
);
1199 set_mem_size (x
, size
);
1200 set_mem_size (y
, size
);
1203 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1204 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1205 else if (emit_block_move_via_movmem (x
, y
, size
, align
))
1207 else if (may_use_call
)
1208 retval
= emit_block_move_via_libcall (x
, y
, size
,
1209 method
== BLOCK_OP_TAILCALL
);
1211 emit_block_move_via_loop (x
, y
, size
, align
);
1213 if (method
== BLOCK_OP_CALL_PARM
)
1219 /* A subroutine of emit_block_move. Returns true if calling the
1220 block move libcall will not clobber any parameters which may have
1221 already been placed on the stack. */
1224 block_move_libcall_safe_for_call_parm (void)
1226 /* If arguments are pushed on the stack, then they're safe. */
1230 /* If registers go on the stack anyway, any argument is sure to clobber
1231 an outgoing argument. */
1232 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1234 tree fn
= emit_block_move_libcall_fn (false);
1236 if (REG_PARM_STACK_SPACE (fn
) != 0)
1241 /* If any argument goes in memory, then it might clobber an outgoing
1244 CUMULATIVE_ARGS args_so_far
;
1247 fn
= emit_block_move_libcall_fn (false);
1248 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1250 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1251 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1253 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1254 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1255 if (!tmp
|| !REG_P (tmp
))
1257 if (targetm
.calls
.arg_partial_bytes (&args_so_far
, mode
, NULL
, 1))
1259 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1265 /* A subroutine of emit_block_move. Expand a movmem pattern;
1266 return true if successful. */
1269 emit_block_move_via_movmem (rtx x
, rtx y
, rtx size
, unsigned int align
)
1271 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1272 int save_volatile_ok
= volatile_ok
;
1273 enum machine_mode mode
;
1275 /* Since this is a move insn, we don't care about volatility. */
1278 /* Try the most limited insn first, because there's no point
1279 including more than one in the machine description unless
1280 the more limited one has some advantage. */
1282 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1283 mode
= GET_MODE_WIDER_MODE (mode
))
1285 enum insn_code code
= movmem_optab
[(int) mode
];
1286 insn_operand_predicate_fn pred
;
1288 if (code
!= CODE_FOR_nothing
1289 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1290 here because if SIZE is less than the mode mask, as it is
1291 returned by the macro, it will definitely be less than the
1292 actual mode mask. */
1293 && ((GET_CODE (size
) == CONST_INT
1294 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1295 <= (GET_MODE_MASK (mode
) >> 1)))
1296 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1297 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1298 || (*pred
) (x
, BLKmode
))
1299 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1300 || (*pred
) (y
, BLKmode
))
1301 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1302 || (*pred
) (opalign
, VOIDmode
)))
1305 rtx last
= get_last_insn ();
1308 op2
= convert_to_mode (mode
, size
, 1);
1309 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1310 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1311 op2
= copy_to_mode_reg (mode
, op2
);
1313 /* ??? When called via emit_block_move_for_call, it'd be
1314 nice if there were some way to inform the backend, so
1315 that it doesn't fail the expansion because it thinks
1316 emitting the libcall would be more efficient. */
1318 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1322 volatile_ok
= save_volatile_ok
;
1326 delete_insns_since (last
);
1330 volatile_ok
= save_volatile_ok
;
1334 /* A subroutine of emit_block_move. Expand a call to memcpy.
1335 Return the return value from memcpy, 0 otherwise. */
1338 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
, bool tailcall
)
1340 rtx dst_addr
, src_addr
;
1341 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1342 enum machine_mode size_mode
;
1345 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1346 pseudos. We can then place those new pseudos into a VAR_DECL and
1349 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1350 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1352 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1353 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1355 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1356 src_tree
= make_tree (ptr_type_node
, src_addr
);
1358 size_mode
= TYPE_MODE (sizetype
);
1360 size
= convert_to_mode (size_mode
, size
, 1);
1361 size
= copy_to_mode_reg (size_mode
, size
);
1363 /* It is incorrect to use the libcall calling conventions to call
1364 memcpy in this context. This could be a user call to memcpy and
1365 the user may wish to examine the return value from memcpy. For
1366 targets where libcalls and normal calls have different conventions
1367 for returning pointers, we could end up generating incorrect code. */
1369 size_tree
= make_tree (sizetype
, size
);
1371 fn
= emit_block_move_libcall_fn (true);
1372 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1373 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1374 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1376 /* Now we have to build up the CALL_EXPR itself. */
1377 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1378 call_expr
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1379 call_expr
, arg_list
, NULL_TREE
);
1380 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
1382 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1387 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1388 for the function we use for block copies. The first time FOR_CALL
1389 is true, we call assemble_external. */
1391 static GTY(()) tree block_move_fn
;
1394 init_block_move_fn (const char *asmspec
)
1400 fn
= get_identifier ("memcpy");
1401 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1402 const_ptr_type_node
, sizetype
,
1405 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
1406 DECL_EXTERNAL (fn
) = 1;
1407 TREE_PUBLIC (fn
) = 1;
1408 DECL_ARTIFICIAL (fn
) = 1;
1409 TREE_NOTHROW (fn
) = 1;
1415 set_user_assembler_name (block_move_fn
, asmspec
);
1419 emit_block_move_libcall_fn (int for_call
)
1421 static bool emitted_extern
;
1424 init_block_move_fn (NULL
);
1426 if (for_call
&& !emitted_extern
)
1428 emitted_extern
= true;
1429 make_decl_rtl (block_move_fn
);
1430 assemble_external (block_move_fn
);
1433 return block_move_fn
;
1436 /* A subroutine of emit_block_move. Copy the data via an explicit
1437 loop. This is used only when libcalls are forbidden. */
1438 /* ??? It'd be nice to copy in hunks larger than QImode. */
1441 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1442 unsigned int align ATTRIBUTE_UNUSED
)
1444 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1445 enum machine_mode iter_mode
;
1447 iter_mode
= GET_MODE (size
);
1448 if (iter_mode
== VOIDmode
)
1449 iter_mode
= word_mode
;
1451 top_label
= gen_label_rtx ();
1452 cmp_label
= gen_label_rtx ();
1453 iter
= gen_reg_rtx (iter_mode
);
1455 emit_move_insn (iter
, const0_rtx
);
1457 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1458 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1459 do_pending_stack_adjust ();
1461 emit_jump (cmp_label
);
1462 emit_label (top_label
);
1464 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
1465 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
1466 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
1467 x
= change_address (x
, QImode
, x_addr
);
1468 y
= change_address (y
, QImode
, y_addr
);
1470 emit_move_insn (x
, y
);
1472 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1473 true, OPTAB_LIB_WIDEN
);
1475 emit_move_insn (iter
, tmp
);
1477 emit_label (cmp_label
);
1479 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1483 /* Copy all or part of a value X into registers starting at REGNO.
1484 The number of registers to be filled is NREGS. */
1487 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1490 #ifdef HAVE_load_multiple
1498 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1499 x
= validize_mem (force_const_mem (mode
, x
));
1501 /* See if the machine can do this with a load multiple insn. */
1502 #ifdef HAVE_load_multiple
1503 if (HAVE_load_multiple
)
1505 last
= get_last_insn ();
1506 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1514 delete_insns_since (last
);
1518 for (i
= 0; i
< nregs
; i
++)
1519 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1520 operand_subword_force (x
, i
, mode
));
1523 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1524 The number of registers to be filled is NREGS. */
1527 move_block_from_reg (int regno
, rtx x
, int nregs
)
1534 /* See if the machine can do this with a store multiple insn. */
1535 #ifdef HAVE_store_multiple
1536 if (HAVE_store_multiple
)
1538 rtx last
= get_last_insn ();
1539 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1547 delete_insns_since (last
);
1551 for (i
= 0; i
< nregs
; i
++)
1553 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1557 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1561 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1562 ORIG, where ORIG is a non-consecutive group of registers represented by
1563 a PARALLEL. The clone is identical to the original except in that the
1564 original set of registers is replaced by a new set of pseudo registers.
1565 The new set has the same modes as the original set. */
1568 gen_group_rtx (rtx orig
)
1573 gcc_assert (GET_CODE (orig
) == PARALLEL
);
1575 length
= XVECLEN (orig
, 0);
1576 tmps
= alloca (sizeof (rtx
) * length
);
1578 /* Skip a NULL entry in first slot. */
1579 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1584 for (; i
< length
; i
++)
1586 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1587 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1589 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1592 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1595 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1596 except that values are placed in TMPS[i], and must later be moved
1597 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1600 emit_group_load_1 (rtx
*tmps
, rtx dst
, rtx orig_src
, tree type
, int ssize
)
1604 enum machine_mode m
= GET_MODE (orig_src
);
1606 gcc_assert (GET_CODE (dst
) == PARALLEL
);
1609 && !SCALAR_INT_MODE_P (m
)
1610 && !MEM_P (orig_src
)
1611 && GET_CODE (orig_src
) != CONCAT
)
1613 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_src
));
1614 if (imode
== BLKmode
)
1615 src
= assign_stack_temp (GET_MODE (orig_src
), ssize
, 0);
1617 src
= gen_reg_rtx (imode
);
1618 if (imode
!= BLKmode
)
1619 src
= gen_lowpart (GET_MODE (orig_src
), src
);
1620 emit_move_insn (src
, orig_src
);
1621 /* ...and back again. */
1622 if (imode
!= BLKmode
)
1623 src
= gen_lowpart (imode
, src
);
1624 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1628 /* Check for a NULL entry, used to indicate that the parameter goes
1629 both on the stack and in registers. */
1630 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1635 /* Process the pieces. */
1636 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1638 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1639 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1640 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1643 /* Handle trailing fragments that run over the size of the struct. */
1644 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1646 /* Arrange to shift the fragment to where it belongs.
1647 extract_bit_field loads to the lsb of the reg. */
1649 #ifdef BLOCK_REG_PADDING
1650 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1651 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1656 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1657 bytelen
= ssize
- bytepos
;
1658 gcc_assert (bytelen
> 0);
1661 /* If we won't be loading directly from memory, protect the real source
1662 from strange tricks we might play; but make sure that the source can
1663 be loaded directly into the destination. */
1665 if (!MEM_P (orig_src
)
1666 && (!CONSTANT_P (orig_src
)
1667 || (GET_MODE (orig_src
) != mode
1668 && GET_MODE (orig_src
) != VOIDmode
)))
1670 if (GET_MODE (orig_src
) == VOIDmode
)
1671 src
= gen_reg_rtx (mode
);
1673 src
= gen_reg_rtx (GET_MODE (orig_src
));
1675 emit_move_insn (src
, orig_src
);
1678 /* Optimize the access just a bit. */
1680 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1681 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1682 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1683 && bytelen
== GET_MODE_SIZE (mode
))
1685 tmps
[i
] = gen_reg_rtx (mode
);
1686 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1688 else if (COMPLEX_MODE_P (mode
)
1689 && GET_MODE (src
) == mode
1690 && bytelen
== GET_MODE_SIZE (mode
))
1691 /* Let emit_move_complex do the bulk of the work. */
1693 else if (GET_CODE (src
) == CONCAT
)
1695 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1696 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1698 if ((bytepos
== 0 && bytelen
== slen0
)
1699 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1701 /* The following assumes that the concatenated objects all
1702 have the same size. In this case, a simple calculation
1703 can be used to determine the object and the bit field
1705 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1706 if (! CONSTANT_P (tmps
[i
])
1707 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
1708 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1709 (bytepos
% slen0
) * BITS_PER_UNIT
,
1710 1, NULL_RTX
, mode
, mode
);
1716 gcc_assert (!bytepos
);
1717 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1718 emit_move_insn (mem
, src
);
1719 tmps
[i
] = extract_bit_field (mem
, bytelen
* BITS_PER_UNIT
,
1720 0, 1, NULL_RTX
, mode
, mode
);
1723 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1724 SIMD register, which is currently broken. While we get GCC
1725 to emit proper RTL for these cases, let's dump to memory. */
1726 else if (VECTOR_MODE_P (GET_MODE (dst
))
1729 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1732 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1733 emit_move_insn (mem
, src
);
1734 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1736 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1737 && XVECLEN (dst
, 0) > 1)
1738 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE(dst
), bytepos
);
1739 else if (CONSTANT_P (src
)
1740 || (REG_P (src
) && GET_MODE (src
) == mode
))
1743 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1744 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1748 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
1749 build_int_cst (NULL_TREE
, shift
), tmps
[i
], 0);
1753 /* Emit code to move a block SRC of type TYPE to a block DST,
1754 where DST is non-consecutive registers represented by a PARALLEL.
1755 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1759 emit_group_load (rtx dst
, rtx src
, tree type
, int ssize
)
1764 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1765 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1767 /* Copy the extracted pieces into the proper (probable) hard regs. */
1768 for (i
= 0; i
< XVECLEN (dst
, 0); i
++)
1770 rtx d
= XEXP (XVECEXP (dst
, 0, i
), 0);
1773 emit_move_insn (d
, tmps
[i
]);
1777 /* Similar, but load SRC into new pseudos in a format that looks like
1778 PARALLEL. This can later be fed to emit_group_move to get things
1779 in the right place. */
1782 emit_group_load_into_temps (rtx parallel
, rtx src
, tree type
, int ssize
)
1787 vec
= rtvec_alloc (XVECLEN (parallel
, 0));
1788 emit_group_load_1 (&RTVEC_ELT (vec
, 0), parallel
, src
, type
, ssize
);
1790 /* Convert the vector to look just like the original PARALLEL, except
1791 with the computed values. */
1792 for (i
= 0; i
< XVECLEN (parallel
, 0); i
++)
1794 rtx e
= XVECEXP (parallel
, 0, i
);
1795 rtx d
= XEXP (e
, 0);
1799 d
= force_reg (GET_MODE (d
), RTVEC_ELT (vec
, i
));
1800 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), d
, XEXP (e
, 1));
1802 RTVEC_ELT (vec
, i
) = e
;
1805 return gen_rtx_PARALLEL (GET_MODE (parallel
), vec
);
1808 /* Emit code to move a block SRC to block DST, where SRC and DST are
1809 non-consecutive groups of registers, each represented by a PARALLEL. */
1812 emit_group_move (rtx dst
, rtx src
)
1816 gcc_assert (GET_CODE (src
) == PARALLEL
1817 && GET_CODE (dst
) == PARALLEL
1818 && XVECLEN (src
, 0) == XVECLEN (dst
, 0));
1820 /* Skip first entry if NULL. */
1821 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1822 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1823 XEXP (XVECEXP (src
, 0, i
), 0));
1826 /* Move a group of registers represented by a PARALLEL into pseudos. */
1829 emit_group_move_into_temps (rtx src
)
1831 rtvec vec
= rtvec_alloc (XVECLEN (src
, 0));
1834 for (i
= 0; i
< XVECLEN (src
, 0); i
++)
1836 rtx e
= XVECEXP (src
, 0, i
);
1837 rtx d
= XEXP (e
, 0);
1840 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), copy_to_reg (d
), XEXP (e
, 1));
1841 RTVEC_ELT (vec
, i
) = e
;
1844 return gen_rtx_PARALLEL (GET_MODE (src
), vec
);
1847 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1848 where SRC is non-consecutive registers represented by a PARALLEL.
1849 SSIZE represents the total size of block ORIG_DST, or -1 if not
1853 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1857 enum machine_mode m
= GET_MODE (orig_dst
);
1859 gcc_assert (GET_CODE (src
) == PARALLEL
);
1861 if (!SCALAR_INT_MODE_P (m
)
1862 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
1864 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_dst
));
1865 if (imode
== BLKmode
)
1866 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
, 0);
1868 dst
= gen_reg_rtx (imode
);
1869 emit_group_store (dst
, src
, type
, ssize
);
1870 if (imode
!= BLKmode
)
1871 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
1872 emit_move_insn (orig_dst
, dst
);
1876 /* Check for a NULL entry, used to indicate that the parameter goes
1877 both on the stack and in registers. */
1878 if (XEXP (XVECEXP (src
, 0, 0), 0))
1883 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
1885 /* Copy the (probable) hard regs into pseudos. */
1886 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1888 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1889 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1890 emit_move_insn (tmps
[i
], reg
);
1893 /* If we won't be storing directly into memory, protect the real destination
1894 from strange tricks we might play. */
1896 if (GET_CODE (dst
) == PARALLEL
)
1900 /* We can get a PARALLEL dst if there is a conditional expression in
1901 a return statement. In that case, the dst and src are the same,
1902 so no action is necessary. */
1903 if (rtx_equal_p (dst
, src
))
1906 /* It is unclear if we can ever reach here, but we may as well handle
1907 it. Allocate a temporary, and split this into a store/load to/from
1910 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
1911 emit_group_store (temp
, src
, type
, ssize
);
1912 emit_group_load (dst
, temp
, type
, ssize
);
1915 else if (!MEM_P (dst
) && GET_CODE (dst
) != CONCAT
)
1917 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
1918 /* Make life a bit easier for combine. */
1919 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
1922 /* Process the pieces. */
1923 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1925 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
1926 enum machine_mode mode
= GET_MODE (tmps
[i
]);
1927 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1930 /* Handle trailing fragments that run over the size of the struct. */
1931 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1933 /* store_bit_field always takes its value from the lsb.
1934 Move the fragment to the lsb if it's not already there. */
1936 #ifdef BLOCK_REG_PADDING
1937 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
1938 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1944 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1945 tmps
[i
] = expand_shift (RSHIFT_EXPR
, mode
, tmps
[i
],
1946 build_int_cst (NULL_TREE
, shift
),
1949 bytelen
= ssize
- bytepos
;
1952 if (GET_CODE (dst
) == CONCAT
)
1954 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
1955 dest
= XEXP (dst
, 0);
1956 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
1958 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
1959 dest
= XEXP (dst
, 1);
1963 gcc_assert (bytepos
== 0 && XVECLEN (src
, 0));
1964 dest
= assign_stack_temp (GET_MODE (dest
),
1965 GET_MODE_SIZE (GET_MODE (dest
)), 0);
1966 emit_move_insn (adjust_address (dest
, GET_MODE (tmps
[i
]), bytepos
),
1973 /* Optimize the access just a bit. */
1975 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
1976 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
1977 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1978 && bytelen
== GET_MODE_SIZE (mode
))
1979 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
1981 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
1985 /* Copy from the pseudo into the (probable) hard reg. */
1986 if (orig_dst
!= dst
)
1987 emit_move_insn (orig_dst
, dst
);
1990 /* Generate code to copy a BLKmode object of TYPE out of a
1991 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1992 is null, a stack temporary is created. TGTBLK is returned.
1994 The purpose of this routine is to handle functions that return
1995 BLKmode structures in registers. Some machines (the PA for example)
1996 want to return all small structures in registers regardless of the
1997 structure's alignment. */
2000 copy_blkmode_from_reg (rtx tgtblk
, rtx srcreg
, tree type
)
2002 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2003 rtx src
= NULL
, dst
= NULL
;
2004 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2005 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
2009 tgtblk
= assign_temp (build_qualified_type (type
,
2011 | TYPE_QUAL_CONST
)),
2013 preserve_temp_slots (tgtblk
);
2016 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2017 into a new pseudo which is a full word. */
2019 if (GET_MODE (srcreg
) != BLKmode
2020 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2021 srcreg
= convert_to_mode (word_mode
, srcreg
, TYPE_UNSIGNED (type
));
2023 /* If the structure doesn't take up a whole number of words, see whether
2024 SRCREG is padded on the left or on the right. If it's on the left,
2025 set PADDING_CORRECTION to the number of bits to skip.
2027 In most ABIs, the structure will be returned at the least significant end of
2028 the register, which translates to right padding on little-endian
2029 targets and left padding on big-endian targets. The opposite
2030 holds if the structure is returned at the most significant
2031 end of the register. */
2032 if (bytes
% UNITS_PER_WORD
!= 0
2033 && (targetm
.calls
.return_in_msb (type
)
2035 : BYTES_BIG_ENDIAN
))
2037 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
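  /* Worked example (illustrative numbers only): on a target with 32-bit
     words returning a 3-byte structure in the most significant end of a
     register, bytes % UNITS_PER_WORD == 3, so PADDING_CORRECTION becomes
     32 - 3 * 8 == 8 bits to skip before the first useful bit.  */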
2039 /* Copy the structure BITSIZE bits at a time.
2041 We could probably emit more efficient code for machines which do not use
2042 strict alignment, but it doesn't seem worth the effort at the current
time.  */
2044 for (bitpos
= 0, xbitpos
= padding_correction
;
2045 bitpos
< bytes
* BITS_PER_UNIT
;
2046 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2048 /* We need a new source operand each time xbitpos is on a
2049 word boundary and when xbitpos == padding_correction
2050 (the first time through). */
2051 if (xbitpos
% BITS_PER_WORD
== 0
2052 || xbitpos
== padding_correction
)
2053 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2056 /* We need a new destination operand each time bitpos is on
2058 if (bitpos
% BITS_PER_WORD
== 0)
2059 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2061 /* Use xbitpos for the source extraction (right justified) and
2062 bitpos for the destination store (left justified).  */
2063 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2064 extract_bit_field (src
, bitsize
,
2065 xbitpos
% BITS_PER_WORD
, 1,
2066 NULL_RTX
, word_mode
, word_mode
));
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
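/* Note: the EXPR_LIST accumulated in *CALL_FUSAGE is typically attached
   to the emitted call insn as its CALL_INSN_FUNCTION_USAGE by the caller
   (see calls.c).  */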
2121 /* Determine whether the LEN bytes generated by CONSTFUN can be
2122 stored to memory using several move instructions. CONSTFUNDATA is
2123 a pointer which will be passed as argument in every CONSTFUN call.
2124 ALIGN is maximum alignment we can assume. Return nonzero if a
2125 call to store_by_pieces should succeed. */
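/* For example, the string/memory builtin expanders (builtins.c) are
   expected to call can_store_by_pieces first and only call
   store_by_pieces once this predicate has returned nonzero.  */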
2128 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
2129 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2130 void *constfundata
, unsigned int align
)
2132 unsigned HOST_WIDE_INT l
;
2133 unsigned int max_size
;
2134 HOST_WIDE_INT offset
= 0;
2135 enum machine_mode mode
, tmode
;
2136 enum insn_code icode
;
2143 if (! STORE_BY_PIECES_P (len
, align
))
2146 tmode
= mode_for_size (STORE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
2147 if (align
>= GET_MODE_ALIGNMENT (tmode
))
2148 align
= GET_MODE_ALIGNMENT (tmode
);
2151 enum machine_mode xmode
;
2153 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
2155 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
2156 if (GET_MODE_SIZE (tmode
) > STORE_MAX_PIECES
2157 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
2160 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
2163 /* We would first store what we can in the largest integer mode, then go to
2164 successively smaller modes. */
2167 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2172 max_size
= STORE_MAX_PIECES
+ 1;
2173 while (max_size
> 1)
2175 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2176 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2177 if (GET_MODE_SIZE (tmode
) < max_size
)
2180 if (mode
== VOIDmode
)
2183 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2184 if (icode
!= CODE_FOR_nothing
2185 && align
>= GET_MODE_ALIGNMENT (mode
))
2187 unsigned int size
= GET_MODE_SIZE (mode
);
2194 cst
= (*constfun
) (constfundata
, offset
, mode
);
2195 if (!LEGITIMATE_CONSTANT_P (cst
))
2205 max_size
= GET_MODE_SIZE (mode
);
2208 /* The code above should have handled everything. */
2215 /* Generate several move instructions to store LEN bytes generated by
2216 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2217 pointer which will be passed as argument in every CONSTFUN call.
2218 ALIGN is maximum alignment we can assume.
2219 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2220 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
stpcpy.  */
2224 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
2225 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2226 void *constfundata
, unsigned int align
, int endp
)
2228 struct store_by_pieces data
;
2232 gcc_assert (endp
!= 2);
2236 gcc_assert (STORE_BY_PIECES_P (len
, align
));
2237 data
.constfun
= constfun
;
2238 data
.constfundata
= constfundata
;
2241 store_by_pieces_1 (&data
, align
);
2246 gcc_assert (!data
.reverse
);
2251 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2252 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2254 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
2257 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2264 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
2301 /* Subroutine of clear_by_pieces and store_by_pieces.
2302 Generate several move instructions to store LEN bytes of block TO. (A MEM
2303 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2306 store_by_pieces_1 (struct store_by_pieces
*data ATTRIBUTE_UNUSED
,
2307 unsigned int align ATTRIBUTE_UNUSED
)
2309 rtx to_addr
= XEXP (data
->to
, 0);
2310 unsigned int max_size
= STORE_MAX_PIECES
+ 1;
2311 enum machine_mode mode
= VOIDmode
, tmode
;
2312 enum insn_code icode
;
2315 data
->to_addr
= to_addr
;
2317 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2318 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2320 data
->explicit_inc_to
= 0;
2322 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2324 data
->offset
= data
->len
;
2326 /* If storing requires more than two move insns,
2327 copy addresses to registers (to make displacements shorter)
2328 and use post-increment if available. */
2329 if (!data
->autinc_to
2330 && move_by_pieces_ninsns (data
->len
, align
, max_size
) > 2)
2332 /* Determine the main mode we'll be using. */
2333 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2334 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2335 if (GET_MODE_SIZE (tmode
) < max_size
)
2338 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2340 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2341 data
->autinc_to
= 1;
2342 data
->explicit_inc_to
= -1;
2345 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2346 && ! data
->autinc_to
)
2348 data
->to_addr
= copy_addr_to_reg (to_addr
);
2349 data
->autinc_to
= 1;
2350 data
->explicit_inc_to
= 1;
2353 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2354 data
->to_addr
= copy_addr_to_reg (to_addr
);
2357 tmode
= mode_for_size (STORE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
2358 if (align
>= GET_MODE_ALIGNMENT (tmode
))
2359 align
= GET_MODE_ALIGNMENT (tmode
);
2362 enum machine_mode xmode
;
2364 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
2366 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
2367 if (GET_MODE_SIZE (tmode
) > STORE_MAX_PIECES
2368 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
2371 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
2374 /* First store what we can in the largest integer mode, then go to
2375 successively smaller modes. */
2377 while (max_size
> 1)
2379 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2380 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2381 if (GET_MODE_SIZE (tmode
) < max_size
)
2384 if (mode
== VOIDmode
)
2387 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2388 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2389 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2391 max_size
= GET_MODE_SIZE (mode
);
2394 /* The code above should have handled everything. */
2395 gcc_assert (!data
->len
);
2398 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2399 with move instructions for mode MODE. GENFUN is the gen_... function
2400 to make a move insn for that mode. DATA has all the other info. */
2403 store_by_pieces_2 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
2404 struct store_by_pieces
*data
)
2406 unsigned int size
= GET_MODE_SIZE (mode
);
2409 while (data
->len
>= size
)
2412 data
->offset
-= size
;
2414 if (data
->autinc_to
)
2415 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
2418 to1
= adjust_address (data
->to
, mode
, data
->offset
);
2420 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2421 emit_insn (gen_add2_insn (data
->to_addr
,
2422 GEN_INT (-(HOST_WIDE_INT
) size
)));
2424 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
);
2425 emit_insn ((*genfun
) (to1
, cst
));
2427 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2428 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2430 if (! data
->reverse
)
2431 data
->offset
+= size
;
2437 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2438 its length in bytes. */
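/* Roughly, the strategy below is: emit a single zero move when OBJECT has
   a non-BLK mode of matching size, clear complex values one part at a
   time, otherwise clear by pieces when the size is a suitable constant,
   then try a clrmem pattern, and finally fall back to a memset libcall.  */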
2441 clear_storage (rtx object
, rtx size
, enum block_op_methods method
)
2443 enum machine_mode mode
= GET_MODE (object
);
2446 gcc_assert (method
== BLOCK_OP_NORMAL
|| method
== BLOCK_OP_TAILCALL
);
2448 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2449 just move a zero. Otherwise, do this a piece at a time. */
2451 && GET_CODE (size
) == CONST_INT
2452 && INTVAL (size
) == (HOST_WIDE_INT
) GET_MODE_SIZE (mode
))
2454 rtx zero
= CONST0_RTX (mode
);
2457 emit_move_insn (object
, zero
);
2461 if (COMPLEX_MODE_P (mode
))
2463 zero
= CONST0_RTX (GET_MODE_INNER (mode
));
2466 write_complex_part (object
, zero
, 0);
2467 write_complex_part (object
, zero
, 1);
2473 if (size
== const0_rtx
)
2476 align
= MEM_ALIGN (object
);
2478 if (GET_CODE (size
) == CONST_INT
2479 && CLEAR_BY_PIECES_P (INTVAL (size
), align
))
2480 clear_by_pieces (object
, INTVAL (size
), align
);
2481 else if (clear_storage_via_clrmem (object
, size
, align
))
2484 return clear_storage_via_libcall (object
, size
,
2485 method
== BLOCK_OP_TAILCALL
);
2490 /* A subroutine of clear_storage. Expand a clrmem pattern;
2491 return true if successful. */
2494 clear_storage_via_clrmem (rtx object
, rtx size
, unsigned int align
)
2496 /* Try the most limited insn first, because there's no point
2497 including more than one in the machine description unless
2498 the more limited one has some advantage. */
2500 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2501 enum machine_mode mode
;
2503 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2504 mode
= GET_MODE_WIDER_MODE (mode
))
2506 enum insn_code code
= clrmem_optab
[(int) mode
];
2507 insn_operand_predicate_fn pred
;
2509 if (code
!= CODE_FOR_nothing
2510 /* We don't need MODE to be narrower than
2511 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2512 the mode mask, as it is returned by the macro, it will
2513 definitely be less than the actual mode mask. */
2514 && ((GET_CODE (size
) == CONST_INT
2515 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2516 <= (GET_MODE_MASK (mode
) >> 1)))
2517 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2518 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2519 || (*pred
) (object
, BLKmode
))
2520 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2521 || (*pred
) (opalign
, VOIDmode
)))
2524 rtx last
= get_last_insn ();
2527 op1
= convert_to_mode (mode
, size
, 1);
2528 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2529 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2530 op1
= copy_to_mode_reg (mode
, op1
);
2532 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2539 delete_insns_since (last
);
2546 /* A subroutine of clear_storage. Expand a call to memset.
2547 Return the return value of memset, 0 otherwise. */
2550 clear_storage_via_libcall (rtx object
, rtx size
, bool tailcall
)
2552 tree call_expr
, arg_list
, fn
, object_tree
, size_tree
;
2553 enum machine_mode size_mode
;
2556 /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2557 place those new pseudos into a VAR_DECL and use them later.  */
2559 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2561 size_mode
= TYPE_MODE (sizetype
);
2562 size
= convert_to_mode (size_mode
, size
, 1);
2563 size
= copy_to_mode_reg (size_mode
, size
);
2565 /* It is incorrect to use the libcall calling conventions to call
2566 memset in this context. This could be a user call to memset and
2567 the user may wish to examine the return value from memset. For
2568 targets where libcalls and normal calls have different conventions
2569 for returning pointers, we could end up generating incorrect code. */
2571 object_tree
= make_tree (ptr_type_node
, object
);
2572 size_tree
= make_tree (sizetype
, size
);
2574 fn
= clear_storage_libcall_fn (true);
2575 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
2576 arg_list
= tree_cons (NULL_TREE
, integer_zero_node
, arg_list
);
2577 arg_list
= tree_cons (NULL_TREE
, object_tree
, arg_list
);
2579 /* Now we have to build up the CALL_EXPR itself. */
2580 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2581 call_expr
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2582 call_expr
, arg_list
, NULL_TREE
);
2583 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
2585 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2590 /* A subroutine of clear_storage_via_libcall. Create the tree node
2591 for the function we use for block clears. The first time FOR_CALL
2592 is true, we call assemble_external. */
2594 static GTY(()) tree block_clear_fn
;
2597 init_block_clear_fn (const char *asmspec
)
2599 if (!block_clear_fn
)
2603 fn
= get_identifier ("memset");
2604 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2605 integer_type_node
, sizetype
,
2608 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2609 DECL_EXTERNAL (fn
) = 1;
2610 TREE_PUBLIC (fn
) = 1;
2611 DECL_ARTIFICIAL (fn
) = 1;
2612 TREE_NOTHROW (fn
) = 1;
2614 block_clear_fn
= fn
;
2618 set_user_assembler_name (block_clear_fn
, asmspec
);
static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
2639 /* Write to one of the components of the complex value CPLX. Write VAL to
2640 the real part if IMAG_P is false, and the imaginary part if it's true.  */
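/* E.g. for a DCmode (complex double) value, IMAG_P selects which DFmode
   half is written: the first half for the real part, the second half for
   the imaginary part.  */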
2643 write_complex_part (rtx cplx
, rtx val
, bool imag_p
)
2645 enum machine_mode cmode
;
2646 enum machine_mode imode
;
2649 if (GET_CODE (cplx
) == CONCAT
)
2651 emit_move_insn (XEXP (cplx
, imag_p
), val
);
2655 cmode
= GET_MODE (cplx
);
2656 imode
= GET_MODE_INNER (cmode
);
2657 ibitsize
= GET_MODE_BITSIZE (imode
);
2659 /* If the sub-object is at least word sized, then we know that subregging
2660 will work. This special case is important, since store_bit_field
2661 wants to operate on integer modes, and there's rarely an OImode to
2662 correspond to TCmode. */
2663 if (ibitsize
>= BITS_PER_WORD
2664 /* For hard regs we have exact predicates. Assume we can split
2665 the original object if it spans an even number of hard regs.
2666 This special case is important for SCmode on 64-bit platforms
2667 where the natural size of floating-point regs is 32-bit. */
2669 && REGNO (cplx
) < FIRST_PSEUDO_REGISTER
2670 && hard_regno_nregs
[REGNO (cplx
)][cmode
] % 2 == 0)
2671 /* For MEMs we always try to make a "subreg", that is to adjust
2672 the MEM, because store_bit_field may generate overly
2673 convoluted RTL for sub-word fields. */
2676 rtx part
= simplify_gen_subreg (imode
, cplx
, cmode
,
2677 imag_p
? GET_MODE_SIZE (imode
) : 0);
2680 emit_move_insn (part
, val
);
2684 /* simplify_gen_subreg may fail for sub-word MEMs. */
2685 gcc_assert (MEM_P (cplx
) && ibitsize
< BITS_PER_WORD
);
2688 store_bit_field (cplx
, ibitsize
, imag_p
? ibitsize
: 0, imode
, val
);
2691 /* Extract one of the components of the complex value CPLX. Extract the
2692 real part if IMAG_P is false, and the imaginary part if it's true. */
2695 read_complex_part (rtx cplx
, bool imag_p
)
2697 enum machine_mode cmode
, imode
;
2700 if (GET_CODE (cplx
) == CONCAT
)
2701 return XEXP (cplx
, imag_p
);
2703 cmode
= GET_MODE (cplx
);
2704 imode
= GET_MODE_INNER (cmode
);
2705 ibitsize
= GET_MODE_BITSIZE (imode
);
2707 /* Special case reads from complex constants that got spilled to memory. */
2708 if (MEM_P (cplx
) && GET_CODE (XEXP (cplx
, 0)) == SYMBOL_REF
)
2710 tree decl
= SYMBOL_REF_DECL (XEXP (cplx
, 0));
2711 if (decl
&& TREE_CODE (decl
) == COMPLEX_CST
)
2713 tree part
= imag_p
? TREE_IMAGPART (decl
) : TREE_REALPART (decl
);
2714 if (CONSTANT_CLASS_P (part
))
2715 return expand_expr (part
, NULL_RTX
, imode
, EXPAND_NORMAL
);
2719 /* If the sub-object is at least word sized, then we know that subregging
2720 will work. This special case is important, since extract_bit_field
2721 wants to operate on integer modes, and there's rarely an OImode to
2722 correspond to TCmode. */
2723 if (ibitsize
>= BITS_PER_WORD
2724 /* For hard regs we have exact predicates. Assume we can split
2725 the original object if it spans an even number of hard regs.
2726 This special case is important for SCmode on 64-bit platforms
2727 where the natural size of floating-point regs is 32-bit. */
2729 && REGNO (cplx
) < FIRST_PSEUDO_REGISTER
2730 && hard_regno_nregs
[REGNO (cplx
)][cmode
] % 2 == 0)
2731 /* For MEMs we always try to make a "subreg", that is to adjust
2732 the MEM, because extract_bit_field may generate overly
2733 convoluted RTL for sub-word fields. */
2736 rtx ret
= simplify_gen_subreg (imode
, cplx
, cmode
,
2737 imag_p
? GET_MODE_SIZE (imode
) : 0);
2741 /* simplify_gen_subreg may fail for sub-word MEMs. */
2742 gcc_assert (MEM_P (cplx
) && ibitsize
< BITS_PER_WORD
);
2745 return extract_bit_field (cplx
, ibitsize
, imag_p
? ibitsize
: 0,
2746 true, NULL_RTX
, imode
, imode
);
2749 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2750 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2751 represented in NEW_MODE. If FORCE is true, this will never happen, as
2752 we'll force-create a SUBREG if needed. */
2755 emit_move_change_mode (enum machine_mode new_mode
,
2756 enum machine_mode old_mode
, rtx x
, bool force
)
2760 if (reload_in_progress
&& MEM_P (x
))
2762 /* We can't use gen_lowpart here because it may call change_address
2763 which is not appropriate if we were called when a reload was in
2764 progress. We don't have to worry about changing the address since
2765 the size in bytes is supposed to be the same. Copy the MEM to
2766 change the mode and move any substitutions from the old MEM to
the new one.  */
2769 ret
= adjust_address_nv (x
, new_mode
, 0);
2770 copy_replacements (x
, ret
);
2774 /* Note that we do want simplify_subreg's behavior of validating
2775 that the new mode is ok for a hard register. If we were to use
2776 simplify_gen_subreg, we would create the subreg, but would
2777 probably run into the target not being able to implement it. */
2778 /* Except, of course, when FORCE is true, when this is exactly what
2779 we want. Which is needed for CCmodes on some targets. */
2781 ret
= simplify_gen_subreg (new_mode
, x
, old_mode
, 0);
2783 ret
= simplify_subreg (new_mode
, x
, old_mode
, 0);
2789 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2790 an integer mode of the same size as MODE. Returns the instruction
2791 emitted, or NULL if such a move could not be generated. */
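/* For instance, on a target whose SFmode is 32 bits wide but which lacks a
   movsf pattern, the move can usually be performed as an SImode move,
   since int_mode_for_mode (SFmode) is SImode there.  (Illustrative.)  */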
static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = mov_optab->handlers[imode].insn_code;
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, false);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, false);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
2818 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2819 Return an equivalent MEM that does not use an auto-increment. */
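/* E.g. a (pre_dec (reg sp)) push on a downward-growing stack is rewritten
   as an explicit stack-pointer adjustment by the (possibly rounded) mode
   size followed by a plain store through the stack pointer.
   (Illustrative.)  */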
2822 emit_move_resolve_push (enum machine_mode mode
, rtx x
)
2824 enum rtx_code code
= GET_CODE (XEXP (x
, 0));
2825 HOST_WIDE_INT adjust
;
2828 adjust
= GET_MODE_SIZE (mode
);
2829 #ifdef PUSH_ROUNDING
2830 adjust
= PUSH_ROUNDING (adjust
);
2832 if (code
== PRE_DEC
|| code
== POST_DEC
)
2835 /* Do not use anti_adjust_stack, since we don't want to update
2836 stack_pointer_delta. */
2837 temp
= expand_simple_binop (Pmode
, PLUS
, stack_pointer_rtx
,
2838 GEN_INT (adjust
), stack_pointer_rtx
,
2839 0, OPTAB_LIB_WIDEN
);
2840 if (temp
!= stack_pointer_rtx
)
2841 emit_move_insn (stack_pointer_rtx
, temp
);
2847 temp
= stack_pointer_rtx
;
2850 temp
= plus_constant (stack_pointer_rtx
, -GET_MODE_SIZE (mode
));
2853 temp
= plus_constant (stack_pointer_rtx
, GET_MODE_SIZE (mode
));
2859 return replace_equiv_address (x
, temp
);
2862 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2863 X is known to satisfy push_operand, and MODE is known to be complex.
2864 Returns the last instruction emitted. */
2867 emit_move_complex_push (enum machine_mode mode
, rtx x
, rtx y
)
2869 enum machine_mode submode
= GET_MODE_INNER (mode
);
2872 #ifdef PUSH_ROUNDING
2873 unsigned int submodesize
= GET_MODE_SIZE (submode
);
2875 /* In case we output to the stack, but the size is smaller than the
2876 machine can push exactly, we need to use move instructions. */
2877 if (PUSH_ROUNDING (submodesize
) != submodesize
)
2879 x
= emit_move_resolve_push (mode
, x
);
2880 return emit_move_insn (x
, y
);
2884 /* Note that the real part always precedes the imag part in memory
2885 regardless of machine's endianness. */
2886 switch (GET_CODE (XEXP (x
, 0)))
2900 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2901 read_complex_part (y
, imag_first
));
2902 return emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2903 read_complex_part (y
, !imag_first
));
2906 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2907 MODE is known to be complex. Returns the last instruction emitted. */
2910 emit_move_complex (enum machine_mode mode
, rtx x
, rtx y
)
2914 /* Need to take special care for pushes, to maintain proper ordering
2915 of the data, and possibly extra padding. */
2916 if (push_operand (x
, mode
))
2917 return emit_move_complex_push (mode
, x
, y
);
2919 /* See if we can coerce the target into moving both values at once. */
2921 /* Move floating point as parts. */
2922 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
2923 && mov_optab
->handlers
[GET_MODE_INNER (mode
)].insn_code
!= CODE_FOR_nothing
)
2925 /* Not possible if the values are inherently not adjacent. */
2926 else if (GET_CODE (x
) == CONCAT
|| GET_CODE (y
) == CONCAT
)
2928 /* Is possible if both are registers (or subregs of registers). */
2929 else if (register_operand (x
, mode
) && register_operand (y
, mode
))
2931 /* If one of the operands is a memory, and alignment constraints
2932 are friendly enough, we may be able to do combined memory operations.
2933 We do not attempt this if Y is a constant because that combination is
2934 usually better with the by-parts thing below. */
2935 else if ((MEM_P (x
) ? !CONSTANT_P (y
) : MEM_P (y
))
2936 && (!STRICT_ALIGNMENT
2937 || get_mode_alignment (mode
) == BIGGEST_ALIGNMENT
))
2946 /* For memory to memory moves, optimal behavior can be had with the
2947 existing block move logic. */
2948 if (MEM_P (x
) && MEM_P (y
))
2950 emit_block_move (x
, y
, GEN_INT (GET_MODE_SIZE (mode
)),
2951 BLOCK_OP_NO_LIBCALL
);
2952 return get_last_insn ();
2955 ret
= emit_move_via_integer (mode
, x
, y
);
2960 /* Show the output dies here. This is necessary for SUBREGs
2961 of pseudos since we cannot track their lifetimes correctly;
2962 hard regs shouldn't appear here except as return values. */
2963 if (!reload_completed
&& !reload_in_progress
2964 && REG_P (x
) && !reg_overlap_mentioned_p (x
, y
))
2965 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2967 write_complex_part (x
, read_complex_part (y
, false), false);
2968 write_complex_part (x
, read_complex_part (y
, true), true);
2969 return get_last_insn ();
2972 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2973 MODE is known to be MODE_CC. Returns the last instruction emitted. */
static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = mov_optab->handlers[CCmode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y);
  gcc_assert (ret != NULL);
  return ret;
}
2998 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2999 MODE is any multi-word or full-word mode that lacks a move_insn
3000 pattern. Note that you will get better code if you define such
3001 patterns, even if they must turn into multiple assembler instructions. */
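/* For instance, a TImode move on a 32-bit target with no movti pattern is
   emitted here as four word_mode moves, one per word.  (Illustrative.)  */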
3004 emit_move_multi_word (enum machine_mode mode
, rtx x
, rtx y
)
3011 gcc_assert (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
);
3013 /* If X is a push on the stack, do the push now and replace
3014 X with a reference to the stack pointer. */
3015 if (push_operand (x
, mode
))
3016 x
= emit_move_resolve_push (mode
, x
);
3018 /* If we are in reload, see if either operand is a MEM whose address
3019 is scheduled for replacement. */
3020 if (reload_in_progress
&& MEM_P (x
)
3021 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3022 x
= replace_equiv_address_nv (x
, inner
);
3023 if (reload_in_progress
&& MEM_P (y
)
3024 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3025 y
= replace_equiv_address_nv (y
, inner
);
3029 need_clobber
= false;
3031 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3034 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3035 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3037 /* If we can't get a part of Y, put Y into memory if it is a
3038 constant. Otherwise, force it into a register. Then we must
3039 be able to get a part of Y. */
3040 if (ypart
== 0 && CONSTANT_P (y
))
3042 y
= force_const_mem (mode
, y
);
3043 ypart
= operand_subword (y
, i
, 1, mode
);
3045 else if (ypart
== 0)
3046 ypart
= operand_subword_force (y
, i
, mode
);
3048 gcc_assert (xpart
&& ypart
);
3050 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3052 last_insn
= emit_move_insn (xpart
, ypart
);
3058 /* Show the output dies here. This is necessary for SUBREGs
3059 of pseudos since we cannot track their lifetimes correctly;
3060 hard regs shouldn't appear here except as return values.
3061 We never want to emit such a clobber after reload. */
3063 && ! (reload_in_progress
|| reload_completed
)
3064 && need_clobber
!= 0)
3065 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3072 /* Low level part of emit_move_insn.
3073 Called just like emit_move_insn, but assumes X and Y
3074 are basically valid. */
3077 emit_move_insn_1 (rtx x
, rtx y
)
3079 enum machine_mode mode
= GET_MODE (x
);
3080 enum insn_code code
;
3082 gcc_assert ((unsigned int) mode
< (unsigned int) MAX_MACHINE_MODE
);
3084 code
= mov_optab
->handlers
[mode
].insn_code
;
3085 if (code
!= CODE_FOR_nothing
)
3086 return emit_insn (GEN_FCN (code
) (x
, y
));
3088 /* Expand complex moves by moving real part and imag part. */
3089 if (COMPLEX_MODE_P (mode
))
3090 return emit_move_complex (mode
, x
, y
);
3092 if (GET_MODE_CLASS (mode
) == MODE_CC
)
3093 return emit_move_ccmode (mode
, x
, y
);
3095 /* Try using a move pattern for the corresponding integer mode. This is
3096 only safe when simplify_subreg can convert MODE constants into integer
3097 constants. At present, it can only do this reliably if the value
3098 fits within a HOST_WIDE_INT. */
3099 if (!CONSTANT_P (y
) || GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
3101 rtx ret
= emit_move_via_integer (mode
, x
, y
);
3106 return emit_move_multi_word (mode
, x
, y
);
3109 /* Generate code to copy Y into X.
3110 Both Y and X must have the same mode, except that
3111 Y can be a constant with VOIDmode.
3112 This mode cannot be BLKmode; use emit_block_move for that.
3114 Return the last instruction emitted. */
3117 emit_move_insn (rtx x
, rtx y
)
3119 enum machine_mode mode
= GET_MODE (x
);
3120 rtx y_cst
= NULL_RTX
;
3123 gcc_assert (mode
!= BLKmode
3124 && (GET_MODE (y
) == mode
|| GET_MODE (y
) == VOIDmode
));
3129 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
3130 && (last_insn
= compress_float_constant (x
, y
)))
3135 if (!LEGITIMATE_CONSTANT_P (y
))
3137 y
= force_const_mem (mode
, y
);
3139 /* If the target's cannot_force_const_mem prevented the spill,
3140 assume that the target's move expanders will also take care
3141 of the non-legitimate constant. */
3147 /* If X or Y are memory references, verify that their addresses are valid
3150 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
3151 && ! push_operand (x
, GET_MODE (x
)))
3153 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
3154 x
= validize_mem (x
);
3157 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
3159 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
3160 y
= validize_mem (y
);
3162 gcc_assert (mode
!= BLKmode
);
3164 last_insn
= emit_move_insn_1 (x
, y
);
3166 if (y_cst
&& REG_P (x
)
3167 && (set
= single_set (last_insn
)) != NULL_RTX
3168 && SET_DEST (set
) == x
3169 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
3170 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
3175 /* If Y is representable exactly in a narrower mode, and the target can
3176 perform the extension directly from constant or memory, then emit the
3177 move as an extension. */
3180 compress_float_constant (rtx x
, rtx y
)
3182 enum machine_mode dstmode
= GET_MODE (x
);
3183 enum machine_mode orig_srcmode
= GET_MODE (y
);
3184 enum machine_mode srcmode
;
3187 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3189 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3190 srcmode
!= orig_srcmode
;
3191 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3194 rtx trunc_y
, last_insn
;
3196 /* Skip if the target can't extend this way. */
3197 ic
= can_extend_p (dstmode
, srcmode
, 0);
3198 if (ic
== CODE_FOR_nothing
)
3201 /* Skip if the narrowed value isn't exact. */
3202 if (! exact_real_truncate (srcmode
, &r
))
3205 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3207 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3209 /* Skip if the target needs extra instructions to perform
3211 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
3214 else if (float_extend_from_mem
[dstmode
][srcmode
])
3215 trunc_y
= validize_mem (force_const_mem (srcmode
, trunc_y
));
3219 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3220 last_insn
= get_last_insn ();
3223 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3231 /* Pushing data onto the stack. */
3233 /* Push a block of length SIZE (perhaps variable)
3234 and return an rtx to address the beginning of the block.
3235 The value may be virtual_outgoing_args_rtx.
3237 EXTRA is the number of bytes of padding to push in addition to SIZE.
3238 BELOW nonzero means this padding comes at low addresses;
3239 otherwise, the padding comes at high addresses. */
3242 push_block (rtx size
, int extra
, int below
)
3246 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3247 if (CONSTANT_P (size
))
3248 anti_adjust_stack (plus_constant (size
, extra
));
3249 else if (REG_P (size
) && extra
== 0)
3250 anti_adjust_stack (size
);
3253 temp
= copy_to_mode_reg (Pmode
, size
);
3255 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3256 temp
, 0, OPTAB_LIB_WIDEN
);
3257 anti_adjust_stack (temp
);
3260 #ifndef STACK_GROWS_DOWNWARD
3266 temp
= virtual_outgoing_args_rtx
;
3267 if (extra
!= 0 && below
)
3268 temp
= plus_constant (temp
, extra
);
3272 if (GET_CODE (size
) == CONST_INT
)
3273 temp
= plus_constant (virtual_outgoing_args_rtx
,
3274 -INTVAL (size
) - (below
? 0 : extra
));
3275 else if (extra
!= 0 && !below
)
3276 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3277 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3279 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3280 negate_rtx (Pmode
, size
));
3283 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
3286 #ifdef PUSH_ROUNDING
3288 /* Emit single push insn. */
3291 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
3294 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3296 enum insn_code icode
;
3297 insn_operand_predicate_fn pred
;
3299 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3300 /* If there is push pattern, use it. Otherwise try old way of throwing
3301 MEM representing push operation to move expander. */
3302 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3303 if (icode
!= CODE_FOR_nothing
)
3305 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3306 && !((*pred
) (x
, mode
))))
3307 x
= force_reg (mode
, x
);
3308 emit_insn (GEN_FCN (icode
) (x
));
3311 if (GET_MODE_SIZE (mode
) == rounded_size
)
3312 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3313 /* If we are to pad downward, adjust the stack pointer first and
3314 then store X into the stack location using an offset. This is
3315 because emit_move_insn does not know how to pad; it does not have
access to type.  */
3317 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
3319 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
3320 HOST_WIDE_INT offset
;
3322 emit_move_insn (stack_pointer_rtx
,
3323 expand_binop (Pmode
,
3324 #ifdef STACK_GROWS_DOWNWARD
3330 GEN_INT (rounded_size
),
3331 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
3333 offset
= (HOST_WIDE_INT
) padding_size
;
3334 #ifdef STACK_GROWS_DOWNWARD
3335 if (STACK_PUSH_CODE
== POST_DEC
)
3336 /* We have already decremented the stack pointer, so get the
previous value.  */
3338 offset
+= (HOST_WIDE_INT
) rounded_size
;
3340 if (STACK_PUSH_CODE
== POST_INC
)
3341 /* We have already incremented the stack pointer, so get the
previous value.  */
3343 offset
-= (HOST_WIDE_INT
) rounded_size
;
3345 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (offset
));
3349 #ifdef STACK_GROWS_DOWNWARD
3350 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3351 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3352 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3354 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3355 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3356 GEN_INT (rounded_size
));
3358 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3361 dest
= gen_rtx_MEM (mode
, dest_addr
);
3365 set_mem_attributes (dest
, type
, 1);
3367 if (flag_optimize_sibling_calls
)
3368 /* Function incoming arguments may overlap with sibling call
3369 outgoing arguments and we cannot allow reordering of reads
3370 from function arguments with stores to outgoing arguments
3371 of sibling calls. */
3372 set_mem_alias_set (dest
, 0);
3374 emit_move_insn (dest
, x
);
3378 /* Generate code to push X onto the stack, assuming it has mode MODE and
type TYPE.
3380 MODE is redundant except when X is a CONST_INT (since they don't
carry mode info).
3382 SIZE is an rtx for the size of data to be copied (in bytes),
3383 needed only if X is BLKmode.
3385 ALIGN (in bits) is maximum alignment we can assume.
3387 If PARTIAL and REG are both nonzero, then copy that many of the first
3388 bytes of X into registers starting with REG, and push the rest of X.
3389 The amount of space pushed is decreased by PARTIAL bytes.
3390 REG must be a hard register in this case.
3391 If REG is zero but PARTIAL is not, take all other actions for an
3392 argument partially in registers, but do not actually load any
registers.
3395 EXTRA is the amount in bytes of extra space to leave next to this arg.
3396 This is ignored if an argument block has already been allocated.
3398 On a machine that lacks real push insns, ARGS_ADDR is the address of
3399 the bottom of the argument block for this call. We use indexing off there
3400 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3401 argument block has not been preallocated.
3403 ARGS_SO_FAR is the size of args previously pushed for this call.
3405 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3406 for arguments passed in registers. If nonzero, it will be the number
3407 of bytes required. */
3410 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3411 unsigned int align
, int partial
, rtx reg
, int extra
,
3412 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3416 enum direction stack_direction
3417 #ifdef STACK_GROWS_DOWNWARD
3423 /* Decide where to pad the argument: `downward' for below,
3424 `upward' for above, or `none' for don't pad it.
3425 Default is below for small data on big-endian machines; else above. */
3426 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3428 /* Invert direction if stack is post-decrement.
3430 if (STACK_PUSH_CODE
== POST_DEC
)
3431 if (where_pad
!= none
)
3432 where_pad
= (where_pad
== downward
? upward
: downward
);
3436 if (mode
== BLKmode
)
3438 /* Copy a block into the stack, entirely or partially. */
3445 offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3446 used
= partial
- offset
;
3450 /* USED is now the # of bytes we need not copy to the stack
3451 because registers will take care of them. */
3454 xinner
= adjust_address (xinner
, BLKmode
, used
);
3456 /* If the partial register-part of the arg counts in its stack size,
3457 skip the part of stack space corresponding to the registers.
3458 Otherwise, start copying to the beginning of the stack space,
3459 by setting SKIP to 0. */
3460 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3462 #ifdef PUSH_ROUNDING
3463 /* Do it with several push insns if that doesn't take lots of insns
3464 and if there is no difficulty with push insns that skip bytes
3465 on the stack for alignment purposes. */
3468 && GET_CODE (size
) == CONST_INT
3470 && MEM_ALIGN (xinner
) >= align
3471 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3472 /* Here we avoid the case of a structure whose weak alignment
3473 forces many pushes of a small amount of data,
3474 and such small pushes do rounding that causes trouble. */
3475 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3476 || align
>= BIGGEST_ALIGNMENT
3477 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3478 == (align
/ BITS_PER_UNIT
)))
3479 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3481 /* Push padding now if padding above and stack grows down,
3482 or if padding below and stack grows up.
3483 But if space already allocated, this has already been done. */
3484 if (extra
&& args_addr
== 0
3485 && where_pad
!= none
&& where_pad
!= stack_direction
)
3486 anti_adjust_stack (GEN_INT (extra
));
3488 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3491 #endif /* PUSH_ROUNDING */
3495 /* Otherwise make space on the stack and copy the data
3496 to the address of that space. */
3498 /* Deduct words put into registers from the size we must copy. */
3501 if (GET_CODE (size
) == CONST_INT
)
3502 size
= GEN_INT (INTVAL (size
) - used
);
3504 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3505 GEN_INT (used
), NULL_RTX
, 0,
3509 /* Get the address of the stack space.
3510 In this case, we do not deal with EXTRA separately.
3511 A single stack adjust will do. */
3514 temp
= push_block (size
, extra
, where_pad
== downward
);
3517 else if (GET_CODE (args_so_far
) == CONST_INT
)
3518 temp
= memory_address (BLKmode
,
3519 plus_constant (args_addr
,
3520 skip
+ INTVAL (args_so_far
)));
3522 temp
= memory_address (BLKmode
,
3523 plus_constant (gen_rtx_PLUS (Pmode
,
3528 if (!ACCUMULATE_OUTGOING_ARGS
)
3530 /* If the source is referenced relative to the stack pointer,
3531 copy it to another register to stabilize it. We do not need
3532 to do this if we know that we won't be changing sp. */
3534 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3535 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3536 temp
= copy_to_reg (temp
);
3539 target
= gen_rtx_MEM (BLKmode
, temp
);
3541 /* We do *not* set_mem_attributes here, because incoming arguments
3542 may overlap with sibling call outgoing arguments and we cannot
3543 allow reordering of reads from function arguments with stores
3544 to outgoing arguments of sibling calls. We do, however, want
3545 to record the alignment of the stack slot. */
3546 /* ALIGN may well be better aligned than TYPE, e.g. due to
3547 PARM_BOUNDARY. Assume the caller isn't lying. */
3548 set_mem_align (target
, align
);
3550 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3553 else if (partial
> 0)
3555 /* Scalar partly in registers. */
3557 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3560 /* # bytes of start of argument
3561 that we must make space for but need not store. */
3562 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3563 int args_offset
= INTVAL (args_so_far
);
3566 /* Push padding now if padding above and stack grows down,
3567 or if padding below and stack grows up.
3568 But if space already allocated, this has already been done. */
3569 if (extra
&& args_addr
== 0
3570 && where_pad
!= none
&& where_pad
!= stack_direction
)
3571 anti_adjust_stack (GEN_INT (extra
));
3573 /* If we make space by pushing it, we might as well push
3574 the real data. Otherwise, we can leave OFFSET nonzero
3575 and leave the space uninitialized. */
3579 /* Now NOT_STACK gets the number of words that we don't need to
3580 allocate on the stack. Convert OFFSET to words too. */
3581 not_stack
= (partial
- offset
) / UNITS_PER_WORD
;
3582 offset
/= UNITS_PER_WORD
;
3584 /* If the partial register-part of the arg counts in its stack size,
3585 skip the part of stack space corresponding to the registers.
3586 Otherwise, start copying to the beginning of the stack space,
3587 by setting SKIP to 0. */
3588 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3590 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3591 x
= validize_mem (force_const_mem (mode
, x
));
3593 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3594 SUBREGs of such registers are not allowed. */
3595 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
3596 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3597 x
= copy_to_reg (x
);
3599 /* Loop over all the words allocated on the stack for this arg. */
3600 /* We can do it by words, because any scalar bigger than a word
3601 has a size a multiple of a word. */
3602 #ifndef PUSH_ARGS_REVERSED
3603 for (i
= not_stack
; i
< size
; i
++)
3605 for (i
= size
- 1; i
>= not_stack
; i
--)
3607 if (i
>= not_stack
+ offset
)
3608 emit_push_insn (operand_subword_force (x
, i
, mode
),
3609 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3611 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3613 reg_parm_stack_space
, alignment_pad
);
3620 /* Push padding now if padding above and stack grows down,
3621 or if padding below and stack grows up.
3622 But if space already allocated, this has already been done. */
3623 if (extra
&& args_addr
== 0
3624 && where_pad
!= none
&& where_pad
!= stack_direction
)
3625 anti_adjust_stack (GEN_INT (extra
));
3627 #ifdef PUSH_ROUNDING
3628 if (args_addr
== 0 && PUSH_ARGS
)
3629 emit_single_push_insn (mode
, x
, type
);
3633 if (GET_CODE (args_so_far
) == CONST_INT
)
3635 = memory_address (mode
,
3636 plus_constant (args_addr
,
3637 INTVAL (args_so_far
)));
3639 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3641 dest
= gen_rtx_MEM (mode
, addr
);
3643 /* We do *not* set_mem_attributes here, because incoming arguments
3644 may overlap with sibling call outgoing arguments and we cannot
3645 allow reordering of reads from function arguments with stores
3646 to outgoing arguments of sibling calls. We do, however, want
3647 to record the alignment of the stack slot. */
3648 /* ALIGN may well be better aligned than TYPE, e.g. due to
3649 PARM_BOUNDARY. Assume the caller isn't lying. */
3650 set_mem_align (dest
, align
);
3652 emit_move_insn (dest
, x
);
3656 /* If part should go in registers, copy that part
3657 into the appropriate registers. Do this now, at the end,
3658 since mem-to-mem copies above may do function calls. */
3659 if (partial
> 0 && reg
!= 0)
3661 /* Handle calls that pass values in multiple non-contiguous locations.
3662 The Irix 6 ABI has examples of this. */
3663 if (GET_CODE (reg
) == PARALLEL
)
3664 emit_group_load (reg
, x
, type
, -1);
3667 gcc_assert (partial
% UNITS_PER_WORD
== 0);
3668 move_block_to_reg (REGNO (reg
), x
, partial
/ UNITS_PER_WORD
, mode
);
3672 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3673 anti_adjust_stack (GEN_INT (extra
));
3675 if (alignment_pad
&& args_addr
== 0)
3676 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	   /* Only registers can be subtargets.  */
	   || !REG_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
3694 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3695 FIELD is a bitfield. Returns true if the optimization was successful,
3696 and there's nothing else to do. */
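/* For instance, an assignment such as "x.b |= 1", where the bitfield B is
   stored with a constant operand, can often be done with a single IOR on
   the containing word instead of an extract/modify/insert sequence.
   (Illustrative example.)  */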
3699 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize
,
3700 unsigned HOST_WIDE_INT bitpos
,
3701 enum machine_mode mode1
, rtx str_rtx
,
3704 enum machine_mode str_mode
= GET_MODE (str_rtx
);
3705 unsigned int str_bitsize
= GET_MODE_BITSIZE (str_mode
);
3710 if (mode1
!= VOIDmode
3711 || bitsize
>= BITS_PER_WORD
3712 || str_bitsize
> BITS_PER_WORD
3713 || TREE_SIDE_EFFECTS (to
)
3714 || TREE_THIS_VOLATILE (to
))
3718 if (!BINARY_CLASS_P (src
)
3719 || TREE_CODE (TREE_TYPE (src
)) != INTEGER_TYPE
)
3722 op0
= TREE_OPERAND (src
, 0);
3723 op1
= TREE_OPERAND (src
, 1);
3726 if (!operand_equal_p (to
, op0
, 0))
3729 if (MEM_P (str_rtx
))
3731 unsigned HOST_WIDE_INT offset1
;
3733 if (str_bitsize
== 0 || str_bitsize
> BITS_PER_WORD
)
3734 str_mode
= word_mode
;
3735 str_mode
= get_best_mode (bitsize
, bitpos
,
3736 MEM_ALIGN (str_rtx
), str_mode
, 0);
3737 if (str_mode
== VOIDmode
)
3739 str_bitsize
= GET_MODE_BITSIZE (str_mode
);
3742 bitpos
%= str_bitsize
;
3743 offset1
= (offset1
- bitpos
) / BITS_PER_UNIT
;
3744 str_rtx
= adjust_address (str_rtx
, str_mode
, offset1
);
3746 else if (!REG_P (str_rtx
) && GET_CODE (str_rtx
) != SUBREG
)
3749 /* If the bit field covers the whole REG/MEM, store_field
3750 will likely generate better code. */
3751 if (bitsize
>= str_bitsize
)
3754 /* We can't handle fields split across multiple entities. */
3755 if (bitpos
+ bitsize
> str_bitsize
)
3758 if (BYTES_BIG_ENDIAN
)
3759 bitpos
= str_bitsize
- bitpos
- bitsize
;
3761 switch (TREE_CODE (src
))
3765 /* For now, just optimize the case of the topmost bitfield
3766 where we don't need to do any masking and also
3767 1 bit bitfields where xor can be used.
3768 We might win by one instruction for the other bitfields
3769 too if insv/extv instructions aren't used, so that
3770 can be added later. */
3771 if (bitpos
+ bitsize
!= str_bitsize
3772 && (bitsize
!= 1 || TREE_CODE (op1
) != INTEGER_CST
))
3775 value
= expand_expr (op1
, NULL_RTX
, str_mode
, 0);
3776 value
= convert_modes (str_mode
,
3777 TYPE_MODE (TREE_TYPE (op1
)), value
,
3778 TYPE_UNSIGNED (TREE_TYPE (op1
)));
3780 /* We may be accessing data outside the field, which means
3781 we can alias adjacent data. */
3782 if (MEM_P (str_rtx
))
3784 str_rtx
= shallow_copy_rtx (str_rtx
);
3785 set_mem_alias_set (str_rtx
, 0);
3786 set_mem_expr (str_rtx
, 0);
3789 binop
= TREE_CODE (src
) == PLUS_EXPR
? add_optab
: sub_optab
;
3790 if (bitsize
== 1 && bitpos
+ bitsize
!= str_bitsize
)
3792 value
= expand_and (str_mode
, value
, const1_rtx
, NULL
);
3795 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
,
3796 build_int_cst (NULL_TREE
, bitpos
),
3798 result
= expand_binop (str_mode
, binop
, str_rtx
,
3799 value
, str_rtx
, 1, OPTAB_WIDEN
);
3800 if (result
!= str_rtx
)
3801 emit_move_insn (str_rtx
, result
);
3806 if (TREE_CODE (op1
) != INTEGER_CST
)
3808 value
= expand_expr (op1
, NULL_RTX
, GET_MODE (str_rtx
), 0);
3809 value
= convert_modes (GET_MODE (str_rtx
),
3810 TYPE_MODE (TREE_TYPE (op1
)), value
,
3811 TYPE_UNSIGNED (TREE_TYPE (op1
)));
3813 /* We may be accessing data outside the field, which means
3814 we can alias adjacent data. */
3815 if (MEM_P (str_rtx
))
3817 str_rtx
= shallow_copy_rtx (str_rtx
);
3818 set_mem_alias_set (str_rtx
, 0);
3819 set_mem_expr (str_rtx
, 0);
3822 binop
= TREE_CODE (src
) == BIT_IOR_EXPR
? ior_optab
: xor_optab
;
3823 if (bitpos
+ bitsize
!= GET_MODE_BITSIZE (GET_MODE (str_rtx
)))
3825 rtx mask
= GEN_INT (((unsigned HOST_WIDE_INT
) 1 << bitsize
)
3827 value
= expand_and (GET_MODE (str_rtx
), value
, mask
,
3830 value
= expand_shift (LSHIFT_EXPR
, GET_MODE (str_rtx
), value
,
3831 build_int_cst (NULL_TREE
, bitpos
),
3833 result
= expand_binop (GET_MODE (str_rtx
), binop
, str_rtx
,
3834 value
, str_rtx
, 1, OPTAB_WIDEN
);
3835 if (result
!= str_rtx
)
3836 emit_move_insn (str_rtx
, result
);
3847 /* Expand an assignment that stores the value of FROM into TO. */
3850 expand_assignment (tree to
, tree from
)
3855 /* Don't crash if the lhs of the assignment was erroneous. */
3857 if (TREE_CODE (to
) == ERROR_MARK
)
3859 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3863 /* Assignment of a structure component needs special treatment
3864 if the structure component's rtx is not simply a MEM.
3865 Assignment of an array element at a constant index, and assignment of
3866 an array element in an unaligned packed structure field, has the same
3868 if (handled_component_p (to
)
3869 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
3871 enum machine_mode mode1
;
3872 HOST_WIDE_INT bitsize
, bitpos
;
3879 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3880 &unsignedp
, &volatilep
, true);
3882 /* If we are going to use store_bit_field and extract_bit_field,
3883 make sure to_rtx will be safe for multiple use. */
3885 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3889 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3891 gcc_assert (MEM_P (to_rtx
));
3893 #ifdef POINTERS_EXTEND_UNSIGNED
3894 if (GET_MODE (offset_rtx
) != Pmode
)
3895 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
3897 if (GET_MODE (offset_rtx
) != ptr_mode
)
3898 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3901 /* A constant address in TO_RTX can have VOIDmode, we must not try
3902 to call force_reg for that case. Avoid that case. */
3904 && GET_MODE (to_rtx
) == BLKmode
3905 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3907 && (bitpos
% bitsize
) == 0
3908 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3909 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3911 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3915 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3916 highest_pow2_factor_for_target (to
,
3920 /* Handle expand_expr of a complex value returning a CONCAT. */
3921 if (GET_CODE (to_rtx
) == CONCAT
)
3923 if (TREE_CODE (TREE_TYPE (from
)) == COMPLEX_TYPE
)
3925 gcc_assert (bitpos
== 0);
3926 result
= store_expr (from
, to_rtx
, false);
3930 gcc_assert (bitpos
== 0 || bitpos
== GET_MODE_BITSIZE (mode1
));
3931 result
= store_expr (from
, XEXP (to_rtx
, bitpos
!= 0), false);
3938 /* If the field is at offset zero, we could have been given the
3939 DECL_RTX of the parent struct. Don't munge it. */
3940 to_rtx
= shallow_copy_rtx (to_rtx
);
3942 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
3944 /* Deal with volatile and readonly fields. The former is only
3945 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3947 MEM_VOLATILE_P (to_rtx
) = 1;
3948 if (component_uses_parent_alias_set (to
))
3949 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3952 if (optimize_bitfield_assignment_op (bitsize
, bitpos
, mode1
,
3956 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3957 TREE_TYPE (tem
), get_alias_set (to
));
3961 preserve_temp_slots (result
);
3967 /* If the rhs is a function call and its value is not an aggregate,
3968 call the function before we start to compute the lhs.
3969 This is needed for correct code for cases such as
3970 val = setjmp (buf) on machines where reference to val
3971 requires loading up part of an address in a separate insn.
3973 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3974 since it might be a promoted variable where the zero- or sign- extension
3975 needs to be done. Handling this in the normal way is safe because no
3976 computation is done before the call. */
3977 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
3978 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3979 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3980 && REG_P (DECL_RTL (to
))))
3985 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3987 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3989 /* Handle calls that return values in multiple non-contiguous locations.
3990 The Irix 6 ABI has examples of this. */
3991 if (GET_CODE (to_rtx
) == PARALLEL
)
3992 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
3993 int_size_in_bytes (TREE_TYPE (from
)));
3994 else if (GET_MODE (to_rtx
) == BLKmode
)
3995 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
3998 if (POINTER_TYPE_P (TREE_TYPE (to
)))
3999 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
4000 emit_move_insn (to_rtx
, value
);
4002 preserve_temp_slots (to_rtx
);
4008 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4009 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4012 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4014 /* Don't move directly into a return register. */
4015 if (TREE_CODE (to
) == RESULT_DECL
4016 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
4021 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
4023 if (GET_CODE (to_rtx
) == PARALLEL
)
4024 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
4025 int_size_in_bytes (TREE_TYPE (from
)));
4027 emit_move_insn (to_rtx
, temp
);
4029 preserve_temp_slots (to_rtx
);
4035 /* In case we are returning the contents of an object which overlaps
4036 the place the value is being stored, use a safe function when copying
4037 a value through a pointer into a structure value return block. */
4038 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
4039 && current_function_returns_struct
4040 && !current_function_returns_pcc_struct
)
4045 size
= expr_size (from
);
4046 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4048 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
4049 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
4050 XEXP (from_rtx
, 0), Pmode
,
4051 convert_to_mode (TYPE_MODE (sizetype
),
4052 size
, TYPE_UNSIGNED (sizetype
)),
4053 TYPE_MODE (sizetype
));
4055 preserve_temp_slots (to_rtx
);
4061 /* Compute FROM and store the value in the rtx we got. */
4064 result
= store_expr (from
, to_rtx
, 0);
4065 preserve_temp_slots (result
);
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */
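/* As a hypothetical illustration of the STRING_CST case handled below:
   for a declaration such as

       char buf[8] = "hi";

   store_expr block-copies only the three bytes of the STRING_CST
   (including the terminating NUL) into BUF and then uses clear_storage
   to zero the remaining five bytes of the array.  */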
4085 store_expr (tree exp
, rtx target
, int call_param_p
)
4088 rtx alt_rtl
= NULL_RTX
;
4089 int dont_return_target
= 0;
4091 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4093 /* C++ can generate ?: expressions with a throw expression in one
4094 branch and an rvalue in the other. Here, we resolve attempts to
4095 store the throw expression's nonexistent result. */
4096 gcc_assert (!call_param_p
);
4097 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
4100 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4102 /* Perform first part of compound expression, then assign from second
4104 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4105 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4106 return store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
);
4108 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4110 /* For conditional expression, get safe form of the target. Then
4111 test the condition, doing the appropriate assignment on either
4112 side. This avoids the creation of unnecessary temporaries.
4113 For non-BLKmode, it is more efficient not to do this. */
4115 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4117 do_pending_stack_adjust ();
4119 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4120 store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
);
4121 emit_jump_insn (gen_jump (lab2
));
4124 store_expr (TREE_OPERAND (exp
, 2), target
, call_param_p
);
4130 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4131 /* If this is a scalar in a register that is stored in a wider mode
4132 than the declared mode, compute the result into its declared mode
4133 and then convert to the wider mode. Our value is the computed
4136 rtx inner_target
= 0;
4138 /* We can do the conversion inside EXP, which will often result
4139 in some optimizations. Do the conversion in two steps: first
4140 change the signedness, if needed, then the extend. But don't
4141 do this if the type of EXP is a subtype of something else
4142 since then the conversion might involve more than just
4143 converting modes. */
4144 if (INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4145 && TREE_TYPE (TREE_TYPE (exp
)) == 0
4146 && (!lang_hooks
.reduce_bit_field_operations
4147 || (GET_MODE_PRECISION (GET_MODE (target
))
4148 == TYPE_PRECISION (TREE_TYPE (exp
)))))
4150 if (TYPE_UNSIGNED (TREE_TYPE (exp
))
4151 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4153 (lang_hooks
.types
.signed_or_unsigned_type
4154 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4156 exp
= convert (lang_hooks
.types
.type_for_mode
4157 (GET_MODE (SUBREG_REG (target
)),
4158 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4161 inner_target
= SUBREG_REG (target
);
4164 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
4165 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4167 /* If TEMP is a VOIDmode constant, use convert_modes to make
4168 sure that we properly convert it. */
4169 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4171 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4172 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4173 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4174 GET_MODE (target
), temp
,
4175 SUBREG_PROMOTED_UNSIGNED_P (target
));
4178 convert_move (SUBREG_REG (target
), temp
,
4179 SUBREG_PROMOTED_UNSIGNED_P (target
));
4185 temp
= expand_expr_real (exp
, target
, GET_MODE (target
),
4187 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
4189 /* Return TARGET if it's a specified hardware register.
4190 If TARGET is a volatile mem ref, either return TARGET
4191 or return a reg copied *from* TARGET; ANSI requires this.
4193 Otherwise, if TEMP is not TARGET, return TEMP
4194 if it is constant (for efficiency),
4195 or if we really want the correct value. */
4196 if (!(target
&& REG_P (target
)
4197 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4198 && !(MEM_P (target
) && MEM_VOLATILE_P (target
))
4199 && ! rtx_equal_p (temp
, target
)
4200 && CONSTANT_P (temp
))
4201 dont_return_target
= 1;
4204 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4205 the same as that of TARGET, adjust the constant. This is needed, for
4206 example, in case it is a CONST_DOUBLE and we want only a word-sized
4208 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4209 && TREE_CODE (exp
) != ERROR_MARK
4210 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4211 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4212 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
4214 /* If value was not generated in the target, store it there.
4215 Convert the value to TARGET's type first if necessary and emit the
4216 pending incrementations that have been queued when expanding EXP.
4217 Note that we cannot emit the whole queue blindly because this will
4218 effectively disable the POST_INC optimization later.
4220 If TEMP and TARGET compare equal according to rtx_equal_p, but
4221 one or both of them are volatile memory refs, we have to distinguish
4223 - expand_expr has used TARGET. In this case, we must not generate
4224 another copy. This can be detected by TARGET being equal according
4226 - expand_expr has not used TARGET - that means that the source just
4227 happens to have the same RTX form. Since temp will have been created
4228 by expand_expr, it will compare unequal according to == .
4229 We must generate a copy in this case, to reach the correct number
4230 of volatile memory references. */
4232 if ((! rtx_equal_p (temp
, target
)
4233 || (temp
!= target
&& (side_effects_p (temp
)
4234 || side_effects_p (target
))))
4235 && TREE_CODE (exp
) != ERROR_MARK
4236 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4237 but TARGET is not valid memory reference, TEMP will differ
4238 from TARGET although it is really the same location. */
4239 && !(alt_rtl
&& rtx_equal_p (alt_rtl
, target
))
4240 /* If there's nothing to copy, don't bother. Don't call
4241 expr_size unless necessary, because some front-ends (C++)
4242 expr_size-hook must not be given objects that are not
4243 supposed to be bit-copied or bit-initialized. */
4244 && expr_size (exp
) != const0_rtx
)
4246 if (GET_MODE (temp
) != GET_MODE (target
)
4247 && GET_MODE (temp
) != VOIDmode
)
4249 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
4250 if (dont_return_target
)
4252 /* In this case, we will return TEMP,
4253 so make sure it has the proper mode.
4254 But don't forget to store the value into TARGET. */
4255 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4256 emit_move_insn (target
, temp
);
4259 convert_move (target
, temp
, unsignedp
);
4262 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4264 /* Handle copying a string constant into an array. The string
4265 constant may be shorter than the array. So copy just the string's
4266 actual length, and clear the rest. First get the size of the data
4267 type of the string, which is actually the size of the target. */
4268 rtx size
= expr_size (exp
);
4270 if (GET_CODE (size
) == CONST_INT
4271 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4272 emit_block_move (target
, temp
, size
,
4274 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4277 /* Compute the size of the data to copy from the string. */
4279 = size_binop (MIN_EXPR
,
4280 make_tree (sizetype
, size
),
4281 size_int (TREE_STRING_LENGTH (exp
)));
4283 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4285 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4288 /* Copy that much. */
4289 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
,
4290 TYPE_UNSIGNED (sizetype
));
4291 emit_block_move (target
, temp
, copy_size_rtx
,
4293 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4295 /* Figure out how much is left in TARGET that we have to clear.
4296 Do all calculations in ptr_mode. */
4297 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4299 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4300 target
= adjust_address (target
, BLKmode
,
4301 INTVAL (copy_size_rtx
));
4305 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4306 copy_size_rtx
, NULL_RTX
, 0,
4309 #ifdef POINTERS_EXTEND_UNSIGNED
4310 if (GET_MODE (copy_size_rtx
) != Pmode
)
4311 copy_size_rtx
= convert_to_mode (Pmode
, copy_size_rtx
,
4312 TYPE_UNSIGNED (sizetype
));
4315 target
= offset_address (target
, copy_size_rtx
,
4316 highest_pow2_factor (copy_size
));
4317 label
= gen_label_rtx ();
4318 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4319 GET_MODE (size
), 0, label
);
4322 if (size
!= const0_rtx
)
4323 clear_storage (target
, size
, BLOCK_OP_NORMAL
);
4329 /* Handle calls that return values in multiple non-contiguous locations.
4330 The Irix 6 ABI has examples of this. */
4331 else if (GET_CODE (target
) == PARALLEL
)
4332 emit_group_load (target
, temp
, TREE_TYPE (exp
),
4333 int_size_in_bytes (TREE_TYPE (exp
)));
4334 else if (GET_MODE (temp
) == BLKmode
)
4335 emit_block_move (target
, temp
, expr_size (exp
),
4337 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4340 temp
= force_operand (temp
, target
);
4342 emit_move_insn (target
, temp
);
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields are set to non-constant values,
     and place it in *P_NC_ELTS; and
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * if a type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *p_must_clear.  */
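/* As a hypothetical example of the *P_MUST_CLEAR case: for

       union u { char c; double d; };
       union u x = { .c = 1 };

   the initializer covers only the smallest member, so *P_MUST_CLEAR is
   set and the whole union is cleared before the single char is stored.  */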
4360 categorize_ctor_elements_1 (tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
4361 HOST_WIDE_INT
*p_nc_elts
,
4362 HOST_WIDE_INT
*p_elt_count
,
4365 HOST_WIDE_INT nz_elts
, nc_elts
, elt_count
;
4372 for (list
= CONSTRUCTOR_ELTS (ctor
); list
; list
= TREE_CHAIN (list
))
4374 tree value
= TREE_VALUE (list
);
4375 tree purpose
= TREE_PURPOSE (list
);
4379 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4381 tree lo_index
= TREE_OPERAND (purpose
, 0);
4382 tree hi_index
= TREE_OPERAND (purpose
, 1);
4384 if (host_integerp (lo_index
, 1) && host_integerp (hi_index
, 1))
4385 mult
= (tree_low_cst (hi_index
, 1)
4386 - tree_low_cst (lo_index
, 1) + 1);
4389 switch (TREE_CODE (value
))
4393 HOST_WIDE_INT nz
= 0, nc
= 0, ic
= 0;
4394 categorize_ctor_elements_1 (value
, &nz
, &nc
, &ic
, p_must_clear
);
4395 nz_elts
+= mult
* nz
;
4396 nc_elts
+= mult
* nc
;
4397 elt_count
+= mult
* ic
;
4403 if (!initializer_zerop (value
))
4409 nz_elts
+= mult
* TREE_STRING_LENGTH (value
);
4410 elt_count
+= mult
* TREE_STRING_LENGTH (value
);
4414 if (!initializer_zerop (TREE_REALPART (value
)))
4416 if (!initializer_zerop (TREE_IMAGPART (value
)))
4424 for (v
= TREE_VECTOR_CST_ELTS (value
); v
; v
= TREE_CHAIN (v
))
4426 if (!initializer_zerop (TREE_VALUE (v
)))
4436 if (!initializer_constant_valid_p (value
, TREE_TYPE (value
)))
4443 && (TREE_CODE (TREE_TYPE (ctor
)) == UNION_TYPE
4444 || TREE_CODE (TREE_TYPE (ctor
)) == QUAL_UNION_TYPE
))
4447 bool clear_this
= true;
4449 list
= CONSTRUCTOR_ELTS (ctor
);
4452 /* We don't expect more than one element of the union to be
4453 initialized. Not sure what we should do otherwise... */
4454 gcc_assert (TREE_CHAIN (list
) == NULL
);
4456 init_sub_type
= TREE_TYPE (TREE_VALUE (list
));
4458 /* ??? We could look at each element of the union, and find the
4459 largest element. Which would avoid comparing the size of the
4460 initialized element against any tail padding in the union.
4461 Doesn't seem worth the effort... */
4462 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor
)),
4463 TYPE_SIZE (init_sub_type
)) == 1)
4465 /* And now we have to find out if the element itself is fully
4466 constructed. E.g. for union { struct { int a, b; } s; } u
4467 = { .s = { .a = 1 } }. */
4468 if (elt_count
== count_type_elements (init_sub_type
))
4473 *p_must_clear
= clear_this
;
4476 *p_nz_elts
+= nz_elts
;
4477 *p_nc_elts
+= nc_elts
;
4478 *p_elt_count
+= elt_count
;
4482 categorize_ctor_elements (tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
4483 HOST_WIDE_INT
*p_nc_elts
,
4484 HOST_WIDE_INT
*p_elt_count
,
4490 *p_must_clear
= false;
4491 categorize_ctor_elements_1 (ctor
, p_nz_elts
, p_nc_elts
, p_elt_count
,
4495 /* Count the number of scalars in TYPE. Return -1 on overflow or
4499 count_type_elements (tree type
)
4501 const HOST_WIDE_INT max
= ~((HOST_WIDE_INT
)1 << (HOST_BITS_PER_WIDE_INT
-1));
4502 switch (TREE_CODE (type
))
4506 tree telts
= array_type_nelts (type
);
4507 if (telts
&& host_integerp (telts
, 1))
4509 HOST_WIDE_INT n
= tree_low_cst (telts
, 1) + 1;
4510 HOST_WIDE_INT m
= count_type_elements (TREE_TYPE (type
));
4513 else if (max
/ n
> m
)
4521 HOST_WIDE_INT n
= 0, t
;
4524 for (f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
4525 if (TREE_CODE (f
) == FIELD_DECL
)
4527 t
= count_type_elements (TREE_TYPE (f
));
4537 case QUAL_UNION_TYPE
:
4539 /* Ho hum. How in the world do we guess here? Clearly it isn't
4540 right to count the fields. Guess based on the number of words. */
4541 HOST_WIDE_INT n
= int_size_in_bytes (type
);
4544 return n
/ UNITS_PER_WORD
;
4551 return TYPE_VECTOR_SUBPARTS (type
);
4560 case REFERENCE_TYPE
:
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
      if (must_clear)
	return 1;

      elts = count_type_elements (TREE_TYPE (exp));

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}
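/* For instance, with a hypothetical initializer

       int v[8] = { 0, 0, 0, 0, 0, 0, 1, 0 };

   only one of the eight elements is nonzero, so mostly_zeros_p returns 1
   and store_constructor clears the whole array before storing the 1.  */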
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
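/* A hypothetical example of the recursion through store_constructor_field:

       struct s { int a; int b[3]; };
       struct s x = { 1, { 2, 3, 4 } };

   hits the RECORD_TYPE case below; the nested array initializer is handed
   to store_constructor_field, which recurses into store_constructor rather
   than going through store_field, since its size and position fall on byte
   boundaries.  */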
4653 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
4655 tree type
= TREE_TYPE (exp
);
4656 #ifdef WORD_REGISTER_OPERATIONS
4657 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4660 switch (TREE_CODE (type
))
4664 case QUAL_UNION_TYPE
:
4668 /* If size is zero or the target is already cleared, do nothing. */
4669 if (size
== 0 || cleared
)
4671 /* We either clear the aggregate or indicate the value is dead. */
4672 else if ((TREE_CODE (type
) == UNION_TYPE
4673 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4674 && ! CONSTRUCTOR_ELTS (exp
))
4675 /* If the constructor is empty, clear the union. */
4677 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
4681 /* If we are building a static constructor into a register,
4682 set the initial value as zero so we can fold the value into
4683 a constant. But if more than one register is involved,
4684 this probably loses. */
4685 else if (REG_P (target
) && TREE_STATIC (exp
)
4686 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4688 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4692 /* If the constructor has fewer fields than the structure or
4693 if we are initializing the structure to mostly zeros, clear
4694 the whole structure first. Don't do this if TARGET is a
4695 register whose mode size isn't equal to SIZE since
4696 clear_storage can't handle this case. */
4698 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4699 != fields_length (type
))
4700 || mostly_zeros_p (exp
))
4702 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4705 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
4710 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4712 /* Store each element of the constructor into the
4713 corresponding field of TARGET. */
4715 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4717 tree field
= TREE_PURPOSE (elt
);
4718 tree value
= TREE_VALUE (elt
);
4719 enum machine_mode mode
;
4720 HOST_WIDE_INT bitsize
;
4721 HOST_WIDE_INT bitpos
= 0;
4723 rtx to_rtx
= target
;
4725 /* Just ignore missing fields. We cleared the whole
4726 structure, above, if any fields are missing. */
4730 if (cleared
&& initializer_zerop (value
))
4733 if (host_integerp (DECL_SIZE (field
), 1))
4734 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4738 mode
= DECL_MODE (field
);
4739 if (DECL_BIT_FIELD (field
))
4742 offset
= DECL_FIELD_OFFSET (field
);
4743 if (host_integerp (offset
, 0)
4744 && host_integerp (bit_position (field
), 0))
4746 bitpos
= int_bit_position (field
);
4750 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4757 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
4758 make_tree (TREE_TYPE (exp
),
4761 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4762 gcc_assert (MEM_P (to_rtx
));
4764 #ifdef POINTERS_EXTEND_UNSIGNED
4765 if (GET_MODE (offset_rtx
) != Pmode
)
4766 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
4768 if (GET_MODE (offset_rtx
) != ptr_mode
)
4769 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4772 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4773 highest_pow2_factor (offset
));
4776 #ifdef WORD_REGISTER_OPERATIONS
4777 /* If this initializes a field that is smaller than a
4778 word, at the start of a word, try to widen it to a full
4779 word. This special case allows us to output C++ member
4780 function initializations in a form that the optimizers
4783 && bitsize
< BITS_PER_WORD
4784 && bitpos
% BITS_PER_WORD
== 0
4785 && GET_MODE_CLASS (mode
) == MODE_INT
4786 && TREE_CODE (value
) == INTEGER_CST
4788 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4790 tree type
= TREE_TYPE (value
);
4792 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4794 type
= lang_hooks
.types
.type_for_size
4795 (BITS_PER_WORD
, TYPE_UNSIGNED (type
));
4796 value
= convert (type
, value
);
4799 if (BYTES_BIG_ENDIAN
)
4801 = fold_build2 (LSHIFT_EXPR
, type
, value
,
4802 build_int_cst (NULL_TREE
,
4803 BITS_PER_WORD
- bitsize
));
4804 bitsize
= BITS_PER_WORD
;
4809 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4810 && DECL_NONADDRESSABLE_P (field
))
4812 to_rtx
= copy_rtx (to_rtx
);
4813 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4816 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4817 value
, type
, cleared
,
4818 get_alias_set (TREE_TYPE (field
)));
4828 tree elttype
= TREE_TYPE (type
);
4830 HOST_WIDE_INT minelt
= 0;
4831 HOST_WIDE_INT maxelt
= 0;
4833 domain
= TYPE_DOMAIN (type
);
4834 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4835 && TYPE_MAX_VALUE (domain
)
4836 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4837 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4839 /* If we have constant bounds for the range of the type, get them. */
4842 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4843 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4846 /* If the constructor has fewer elements than the array, clear
4847 the whole array first. Similarly if this is static
4848 constructor of a non-BLKmode object. */
4851 else if (REG_P (target
) && TREE_STATIC (exp
))
4855 HOST_WIDE_INT count
= 0, zero_count
= 0;
4856 need_to_clear
= ! const_bounds_p
;
4858 /* This loop is a more accurate version of the loop in
4859 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4860 is also needed to check for missing elements. */
4861 for (elt
= CONSTRUCTOR_ELTS (exp
);
4862 elt
!= NULL_TREE
&& ! need_to_clear
;
4863 elt
= TREE_CHAIN (elt
))
4865 tree index
= TREE_PURPOSE (elt
);
4866 HOST_WIDE_INT this_node_count
;
4868 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4870 tree lo_index
= TREE_OPERAND (index
, 0);
4871 tree hi_index
= TREE_OPERAND (index
, 1);
4873 if (! host_integerp (lo_index
, 1)
4874 || ! host_integerp (hi_index
, 1))
4880 this_node_count
= (tree_low_cst (hi_index
, 1)
4881 - tree_low_cst (lo_index
, 1) + 1);
4884 this_node_count
= 1;
4886 count
+= this_node_count
;
4887 if (mostly_zeros_p (TREE_VALUE (elt
)))
4888 zero_count
+= this_node_count
;
4891 /* Clear the entire array first if there are any missing
4892 elements, or if the incidence of zero elements is >=
4895 && (count
< maxelt
- minelt
+ 1
4896 || 4 * zero_count
>= 3 * count
))
4900 if (need_to_clear
&& size
> 0)
4903 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4905 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
4909 if (!cleared
&& REG_P (target
))
4910 /* Inform later passes that the old value is dead. */
4911 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4913 /* Store each element of the constructor into the
4914 corresponding element of TARGET, determined by counting the
4916 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4918 elt
= TREE_CHAIN (elt
), i
++)
4920 enum machine_mode mode
;
4921 HOST_WIDE_INT bitsize
;
4922 HOST_WIDE_INT bitpos
;
4924 tree value
= TREE_VALUE (elt
);
4925 tree index
= TREE_PURPOSE (elt
);
4926 rtx xtarget
= target
;
4928 if (cleared
&& initializer_zerop (value
))
4931 unsignedp
= TYPE_UNSIGNED (elttype
);
4932 mode
= TYPE_MODE (elttype
);
4933 if (mode
== BLKmode
)
4934 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4935 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4938 bitsize
= GET_MODE_BITSIZE (mode
);
4940 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4942 tree lo_index
= TREE_OPERAND (index
, 0);
4943 tree hi_index
= TREE_OPERAND (index
, 1);
4944 rtx index_r
, pos_rtx
;
4945 HOST_WIDE_INT lo
, hi
, count
;
4948 /* If the range is constant and "small", unroll the loop. */
4950 && host_integerp (lo_index
, 0)
4951 && host_integerp (hi_index
, 0)
4952 && (lo
= tree_low_cst (lo_index
, 0),
4953 hi
= tree_low_cst (hi_index
, 0),
4954 count
= hi
- lo
+ 1,
4957 || (host_integerp (TYPE_SIZE (elttype
), 1)
4958 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4961 lo
-= minelt
; hi
-= minelt
;
4962 for (; lo
<= hi
; lo
++)
4964 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4967 && !MEM_KEEP_ALIAS_SET_P (target
)
4968 && TREE_CODE (type
) == ARRAY_TYPE
4969 && TYPE_NONALIASED_COMPONENT (type
))
4971 target
= copy_rtx (target
);
4972 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4975 store_constructor_field
4976 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4977 get_alias_set (elttype
));
4982 rtx loop_start
= gen_label_rtx ();
4983 rtx loop_end
= gen_label_rtx ();
4986 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4987 unsignedp
= TYPE_UNSIGNED (domain
);
4989 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4992 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4994 SET_DECL_RTL (index
, index_r
);
4995 store_expr (lo_index
, index_r
, 0);
4997 /* Build the head of the loop. */
4998 do_pending_stack_adjust ();
4999 emit_label (loop_start
);
5001 /* Assign value to element index. */
5003 = convert (ssizetype
,
5004 fold_build2 (MINUS_EXPR
, TREE_TYPE (index
),
5005 index
, TYPE_MIN_VALUE (domain
)));
5006 position
= size_binop (MULT_EXPR
, position
,
5008 TYPE_SIZE_UNIT (elttype
)));
5010 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
5011 xtarget
= offset_address (target
, pos_rtx
,
5012 highest_pow2_factor (position
));
5013 xtarget
= adjust_address (xtarget
, mode
, 0);
5014 if (TREE_CODE (value
) == CONSTRUCTOR
)
5015 store_constructor (value
, xtarget
, cleared
,
5016 bitsize
/ BITS_PER_UNIT
);
5018 store_expr (value
, xtarget
, 0);
5020 /* Generate a conditional jump to exit the loop. */
5021 exit_cond
= build2 (LT_EXPR
, integer_type_node
,
5023 jumpif (exit_cond
, loop_end
);
5025 /* Update the loop counter, and jump to the head of
5027 expand_assignment (index
,
5028 build2 (PLUS_EXPR
, TREE_TYPE (index
),
5029 index
, integer_one_node
));
5031 emit_jump (loop_start
);
5033 /* Build the end of the loop. */
5034 emit_label (loop_end
);
5037 else if ((index
!= 0 && ! host_integerp (index
, 0))
5038 || ! host_integerp (TYPE_SIZE (elttype
), 1))
5043 index
= ssize_int (1);
5046 index
= fold_convert (ssizetype
,
5047 fold_build2 (MINUS_EXPR
,
5050 TYPE_MIN_VALUE (domain
)));
5052 position
= size_binop (MULT_EXPR
, index
,
5054 TYPE_SIZE_UNIT (elttype
)));
5055 xtarget
= offset_address (target
,
5056 expand_expr (position
, 0, VOIDmode
, 0),
5057 highest_pow2_factor (position
));
5058 xtarget
= adjust_address (xtarget
, mode
, 0);
5059 store_expr (value
, xtarget
, 0);
5064 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
5065 * tree_low_cst (TYPE_SIZE (elttype
), 1));
5067 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
5069 if (MEM_P (target
) && !MEM_KEEP_ALIAS_SET_P (target
)
5070 && TREE_CODE (type
) == ARRAY_TYPE
5071 && TYPE_NONALIASED_COMPONENT (type
))
5073 target
= copy_rtx (target
);
5074 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5076 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
5077 type
, cleared
, get_alias_set (elttype
));
5089 tree elttype
= TREE_TYPE (type
);
5090 int elt_size
= tree_low_cst (TYPE_SIZE (elttype
), 1);
5091 enum machine_mode eltmode
= TYPE_MODE (elttype
);
5092 HOST_WIDE_INT bitsize
;
5093 HOST_WIDE_INT bitpos
;
5094 rtvec vector
= NULL
;
5097 gcc_assert (eltmode
!= BLKmode
);
5099 n_elts
= TYPE_VECTOR_SUBPARTS (type
);
5100 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
5102 enum machine_mode mode
= GET_MODE (target
);
5104 icode
= (int) vec_init_optab
->handlers
[mode
].insn_code
;
5105 if (icode
!= CODE_FOR_nothing
)
5109 vector
= rtvec_alloc (n_elts
);
5110 for (i
= 0; i
< n_elts
; i
++)
5111 RTVEC_ELT (vector
, i
) = CONST0_RTX (GET_MODE_INNER (mode
));
5115 /* If the constructor has fewer elements than the vector,
5116 clear the whole array first. Similarly if this is static
5117 constructor of a non-BLKmode object. */
5120 else if (REG_P (target
) && TREE_STATIC (exp
))
5124 unsigned HOST_WIDE_INT count
= 0, zero_count
= 0;
5126 for (elt
= CONSTRUCTOR_ELTS (exp
);
5128 elt
= TREE_CHAIN (elt
))
5130 int n_elts_here
= tree_low_cst
5131 (int_const_binop (TRUNC_DIV_EXPR
,
5132 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt
))),
5133 TYPE_SIZE (elttype
), 0), 1);
5135 count
+= n_elts_here
;
5136 if (mostly_zeros_p (TREE_VALUE (elt
)))
5137 zero_count
+= n_elts_here
;
5140 /* Clear the entire vector first if there are any missing elements,
5141 or if the incidence of zero elements is >= 75%. */
5142 need_to_clear
= (count
< n_elts
|| 4 * zero_count
>= 3 * count
);
5145 if (need_to_clear
&& size
> 0 && !vector
)
5148 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5150 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
5154 /* Inform later passes that the old value is dead. */
5155 if (!cleared
&& REG_P (target
))
5156 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5158 /* Store each element of the constructor into the corresponding
5159 element of TARGET, determined by counting the elements. */
5160 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
5162 elt
= TREE_CHAIN (elt
), i
+= bitsize
/ elt_size
)
5164 tree value
= TREE_VALUE (elt
);
5165 tree index
= TREE_PURPOSE (elt
);
5166 HOST_WIDE_INT eltpos
;
5168 bitsize
= tree_low_cst (TYPE_SIZE (TREE_TYPE (value
)), 1);
5169 if (cleared
&& initializer_zerop (value
))
5173 eltpos
= tree_low_cst (index
, 1);
5179 /* Vector CONSTRUCTORs should only be built from smaller
5180 vectors in the case of BLKmode vectors. */
5181 gcc_assert (TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
);
5182 RTVEC_ELT (vector
, eltpos
)
5183 = expand_expr (value
, NULL_RTX
, VOIDmode
, 0);
5187 enum machine_mode value_mode
=
5188 TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
5189 ? TYPE_MODE (TREE_TYPE (value
))
5191 bitpos
= eltpos
* elt_size
;
5192 store_constructor_field (target
, bitsize
, bitpos
,
5193 value_mode
, value
, type
,
5194 cleared
, get_alias_set (elttype
));
5199 emit_insn (GEN_FCN (icode
)
5201 gen_rtx_PARALLEL (GET_MODE (target
), vector
)));
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
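/* A hypothetical bit-field assignment such as

       struct { unsigned f : 3; } s;
       s.f = 5;

   reaches store_field with BITSIZE 3, BITPOS 0 and MODE VOIDmode, and is
   carried out by store_bit_field below rather than by an ordinary move.  */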
5225 store_field (rtx target
, HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
5226 enum machine_mode mode
, tree exp
, tree type
, int alias_set
)
5228 HOST_WIDE_INT width_mask
= 0;
5230 if (TREE_CODE (exp
) == ERROR_MARK
)
5233 /* If we have nothing to store, do nothing unless the expression has
5236 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5237 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5238 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5240 /* If we are storing into an unaligned field of an aligned union that is
5241 in a register, we may have the mode of TARGET being an integer mode but
5242 MODE == BLKmode. In that case, get an aligned object whose size and
5243 alignment are the same as TARGET and store TARGET into it (we can avoid
5244 the store if the field being stored is the entire width of TARGET). Then
5245 call ourselves recursively to store the field into a BLKmode version of
5246 that object. Finally, load from the object into TARGET. This is not
5247 very efficient in general, but should only be slightly more expensive
5248 than the otherwise-required unaligned accesses. Perhaps this can be
5249 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5250 twice, once with emit_move_insn and once via store_field. */
5253 && (REG_P (target
) || GET_CODE (target
) == SUBREG
))
5255 rtx object
= assign_temp (type
, 0, 1, 1);
5256 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5258 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5259 emit_move_insn (object
, target
);
5261 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, type
, alias_set
);
5263 emit_move_insn (target
, object
);
5265 /* We want to return the BLKmode version of the data. */
5269 if (GET_CODE (target
) == CONCAT
)
5271 /* We're storing into a struct containing a single __complex. */
5273 gcc_assert (!bitpos
);
5274 return store_expr (exp
, target
, 0);
5277 /* If the structure is in a register or if the component
5278 is a bit field, we cannot use addressing to access it.
5279 Use bit-field techniques or SUBREG to store in it. */
5281 if (mode
== VOIDmode
5282 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5283 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5284 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5286 || GET_CODE (target
) == SUBREG
5287 /* If the field isn't aligned enough to store as an ordinary memref,
5288 store it as a bit field. */
5290 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5291 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5292 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5293 || (bitpos
% BITS_PER_UNIT
!= 0)))
5294 /* If the RHS and field are a constant size and the size of the
5295 RHS isn't the same size as the bitfield, we must use bitfield
5298 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5299 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5303 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5304 implies a mask operation. If the precision is the same size as
5305 the field we're storing into, that mask is redundant. This is
5306 particularly common with bit field assignments generated by the
5308 if (TREE_CODE (exp
) == NOP_EXPR
)
5310 tree type
= TREE_TYPE (exp
);
5311 if (INTEGRAL_TYPE_P (type
)
5312 && TYPE_PRECISION (type
) < GET_MODE_BITSIZE (TYPE_MODE (type
))
5313 && bitsize
== TYPE_PRECISION (type
))
5315 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
5316 if (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) >= bitsize
)
5317 exp
= TREE_OPERAND (exp
, 0);
5321 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5323 /* If BITSIZE is narrower than the size of the type of EXP
5324 we will be narrowing TEMP. Normally, what's wanted are the
5325 low-order bits. However, if EXP's type is a record and this is
5326 big-endian machine, we want the upper BITSIZE bits. */
5327 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5328 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5329 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5330 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5331 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5335 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5337 if (mode
!= VOIDmode
&& mode
!= BLKmode
5338 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5339 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5341 /* If the modes of TARGET and TEMP are both BLKmode, both
5342 must be in memory and BITPOS must be aligned on a byte
5343 boundary. If so, we simply do a block copy. */
5344 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5346 gcc_assert (MEM_P (target
) && MEM_P (temp
)
5347 && !(bitpos
% BITS_PER_UNIT
));
5349 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5350 emit_block_move (target
, temp
,
5351 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5358 /* Store the value in the bitfield. */
5359 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
);
5365 /* Now build a reference to just the desired component. */
5366 rtx to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5368 if (to_rtx
== target
)
5369 to_rtx
= copy_rtx (to_rtx
);
5371 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5372 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5373 set_mem_alias_set (to_rtx
, alias_set
);
5375 return store_expr (exp
, to_rtx
, 0);
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */
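/* For a hypothetical reference such as

       struct t { int a; int b[4]; } *p;
       ... p->b[i] ...

   get_inner_reference returns the INDIRECT_REF *p as the containing object,
   sets *PBITSIZE to the size of int in bits, sets *PBITPOS to the constant
   bit offset of B, and stores the variable part of the offset (I scaled by
   the element size, in units) in *POFFSET.  */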
5416 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5417 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5418 enum machine_mode
*pmode
, int *punsignedp
,
5419 int *pvolatilep
, bool keep_aligning
)
5422 enum machine_mode mode
= VOIDmode
;
5423 tree offset
= size_zero_node
;
5424 tree bit_offset
= bitsize_zero_node
;
5427 /* First get the mode, signedness, and size. We do this from just the
5428 outermost expression. */
5429 if (TREE_CODE (exp
) == COMPONENT_REF
)
5431 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5432 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5433 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5435 *punsignedp
= DECL_UNSIGNED (TREE_OPERAND (exp
, 1));
5437 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5439 size_tree
= TREE_OPERAND (exp
, 1);
5440 *punsignedp
= BIT_FIELD_REF_UNSIGNED (exp
);
5444 mode
= TYPE_MODE (TREE_TYPE (exp
));
5445 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
5447 if (mode
== BLKmode
)
5448 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5450 *pbitsize
= GET_MODE_BITSIZE (mode
);
5455 if (! host_integerp (size_tree
, 1))
5456 mode
= BLKmode
, *pbitsize
= -1;
5458 *pbitsize
= tree_low_cst (size_tree
, 1);
5461 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5462 and find the ultimate containing object. */
5465 switch (TREE_CODE (exp
))
5468 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5469 TREE_OPERAND (exp
, 2));
5474 tree field
= TREE_OPERAND (exp
, 1);
5475 tree this_offset
= component_ref_field_offset (exp
);
5477 /* If this field hasn't been filled in yet, don't go past it.
5478 This should only happen when folding expressions made during
5479 type construction. */
5480 if (this_offset
== 0)
5483 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5484 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5485 DECL_FIELD_BIT_OFFSET (field
));
5487 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5492 case ARRAY_RANGE_REF
:
5494 tree index
= TREE_OPERAND (exp
, 1);
5495 tree low_bound
= array_ref_low_bound (exp
);
5496 tree unit_size
= array_ref_element_size (exp
);
5498 /* We assume all arrays have sizes that are a multiple of a byte.
5499 First subtract the lower bound, if any, in the type of the
5500 index, then convert to sizetype and multiply by the size of
5501 the array element. */
5502 if (! integer_zerop (low_bound
))
5503 index
= fold_build2 (MINUS_EXPR
, TREE_TYPE (index
),
5506 offset
= size_binop (PLUS_EXPR
, offset
,
5507 size_binop (MULT_EXPR
,
5508 convert (sizetype
, index
),
5517 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5518 bitsize_int (*pbitsize
));
5521 case VIEW_CONVERT_EXPR
:
5522 if (keep_aligning
&& STRICT_ALIGNMENT
5523 && (TYPE_ALIGN (TREE_TYPE (exp
))
5524 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5525 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5526 < BIGGEST_ALIGNMENT
)
5527 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
5528 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
5536 /* If any reference in the chain is volatile, the effect is volatile. */
5537 if (TREE_THIS_VOLATILE (exp
))
5540 exp
= TREE_OPERAND (exp
, 0);
5544 /* If OFFSET is constant, see if we can return the whole thing as a
5545 constant bit position. Otherwise, split it up. */
5546 if (host_integerp (offset
, 0)
5547 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5549 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5550 && host_integerp (tem
, 0))
5551 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5553 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert (sizetype, aligned_size);
      return size_binop (MULT_EXPR, aligned_size,
			 size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert (sizetype, aligned_offset);
      return size_binop (MULT_EXPR, aligned_offset,
			 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return 1;

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
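/* E.g. a hypothetical address computed as

       (plus:SI (reg:SI 60) (mult:SI (reg:SI 61) (const_int 4)))

   is not a valid operand on most targets; force_operand emits the multiply
   and the add as separate insns and returns the pseudo holding the sum.  */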
5681 force_operand (rtx value
, rtx target
)
5684 /* Use subtarget as the target for operand 0 of a binary operation. */
5685 rtx subtarget
= get_subtarget (target
);
5686 enum rtx_code code
= GET_CODE (value
);
5688 /* Check for subreg applied to an expression produced by loop optimizer. */
5690 && !REG_P (SUBREG_REG (value
))
5691 && !MEM_P (SUBREG_REG (value
)))
5693 value
= simplify_gen_subreg (GET_MODE (value
),
5694 force_reg (GET_MODE (SUBREG_REG (value
)),
5695 force_operand (SUBREG_REG (value
),
5697 GET_MODE (SUBREG_REG (value
)),
5698 SUBREG_BYTE (value
));
5699 code
= GET_CODE (value
);
5702 /* Check for a PIC address load. */
5703 if ((code
== PLUS
|| code
== MINUS
)
5704 && XEXP (value
, 0) == pic_offset_table_rtx
5705 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5706 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5707 || GET_CODE (XEXP (value
, 1)) == CONST
))
5710 subtarget
= gen_reg_rtx (GET_MODE (value
));
5711 emit_move_insn (subtarget
, value
);
5715 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5718 target
= gen_reg_rtx (GET_MODE (value
));
5719 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5720 code
== ZERO_EXTEND
);
5724 if (ARITHMETIC_P (value
))
5726 op2
= XEXP (value
, 1);
5727 if (!CONSTANT_P (op2
) && !(REG_P (op2
) && op2
!= subtarget
))
5729 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5732 op2
= negate_rtx (GET_MODE (value
), op2
);
5735 /* Check for an addition with OP2 a constant integer and our first
5736 operand a PLUS of a virtual register and something else. In that
5737 case, we want to emit the sum of the virtual register and the
5738 constant first and then add the other value. This allows virtual
5739 register instantiation to simply modify the constant rather than
5740 creating another one around this addition. */
5741 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5742 && GET_CODE (XEXP (value
, 0)) == PLUS
5743 && REG_P (XEXP (XEXP (value
, 0), 0))
5744 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5745 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5747 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5748 XEXP (XEXP (value
, 0), 0), op2
,
5749 subtarget
, 0, OPTAB_LIB_WIDEN
);
5750 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5751 force_operand (XEXP (XEXP (value
,
5753 target
, 0, OPTAB_LIB_WIDEN
);
5756 op1
= force_operand (XEXP (value
, 0), subtarget
);
5757 op2
= force_operand (op2
, NULL_RTX
);
5761 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5763 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5764 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5765 target
, 1, OPTAB_LIB_WIDEN
);
5767 return expand_divmod (0,
5768 FLOAT_MODE_P (GET_MODE (value
))
5769 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5770 GET_MODE (value
), op1
, op2
, target
, 0);
5773 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5777 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5781 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5785 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5786 target
, 0, OPTAB_LIB_WIDEN
);
5789 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5790 target
, 1, OPTAB_LIB_WIDEN
);
5793 if (UNARY_P (value
))
5795 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5796 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5799 #ifdef INSN_SCHEDULING
5800 /* On machines that have insn scheduling, we want all memory reference to be
5801 explicit, so we need to deal with such paradoxical SUBREGs. */
5802 if (GET_CODE (value
) == SUBREG
&& MEM_P (SUBREG_REG (value
))
5803 && (GET_MODE_SIZE (GET_MODE (value
))
5804 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5806 = simplify_gen_subreg (GET_MODE (value
),
5807 force_reg (GET_MODE (SUBREG_REG (value
)),
5808 force_operand (SUBREG_REG (value
),
5810 GET_MODE (SUBREG_REG (value
)),
5811 SUBREG_BYTE (value
));
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
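/* For instance, when expanding a hypothetical assignment such as

       *p = f () + *p;

   the destination is a MEM and the right-hand side contains a CALL_EXPR;
   the call is assumed to clobber all of memory, so safe_from_p returns 0
   and the right-hand side is evaluated into a temporary first.  */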
5826 safe_from_p (rtx x
, tree exp
, int top_p
)
5832 /* If EXP has varying size, we MUST use a target since we currently
5833 have no way of allocating temporaries of variable size
5834 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5835 So we assume here that something at a higher level has prevented a
5836 clash. This is somewhat bogus, but the best we can do. Only
5837 do this when X is BLKmode and when we are at the top level. */
5838 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5839 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5840 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5841 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5842 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5844 && GET_MODE (x
) == BLKmode
)
5845 /* If X is in the outgoing argument area, it is always safe. */
5847 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5848 || (GET_CODE (XEXP (x
, 0)) == PLUS
5849 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5852 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5853 find the underlying pseudo. */
5854 if (GET_CODE (x
) == SUBREG
)
5857 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5861 /* Now look at our tree code and possibly recurse. */
5862 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5864 case tcc_declaration
:
5865 exp_rtl
= DECL_RTL_IF_SET (exp
);
5871 case tcc_exceptional
:
5872 if (TREE_CODE (exp
) == TREE_LIST
)
5876 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5878 exp
= TREE_CHAIN (exp
);
5881 if (TREE_CODE (exp
) != TREE_LIST
)
5882 return safe_from_p (x
, exp
, 0);
5885 else if (TREE_CODE (exp
) == ERROR_MARK
)
5886 return 1; /* An already-visited SAVE_EXPR? */
5891 /* The only case we look at here is the DECL_INITIAL inside a
5893 return (TREE_CODE (exp
) != DECL_EXPR
5894 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
5895 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
5896 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
5899 case tcc_comparison
:
5900 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
5905 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5907 case tcc_expression
:
5909 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5910 the expression. If it is set, we conflict iff we are that rtx or
5911 both are in memory. Otherwise, we check all operands of the
5912 expression recursively. */
5914 switch (TREE_CODE (exp
))
5917 /* If the operand is static or we are static, we can't conflict.
5918 Likewise if we don't conflict with the operand at all. */
5919 if (staticp (TREE_OPERAND (exp
, 0))
5920 || TREE_STATIC (exp
)
5921 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5924 /* Otherwise, the only way this can conflict is if we are taking
5925 the address of a DECL a that address if part of X, which is
5927 exp
= TREE_OPERAND (exp
, 0);
5930 if (!DECL_RTL_SET_P (exp
)
5931 || !MEM_P (DECL_RTL (exp
)))
5934 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5938 case MISALIGNED_INDIRECT_REF
:
5939 case ALIGN_INDIRECT_REF
:
5942 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5943 get_alias_set (exp
)))
5948 /* Assume that the call will clobber all hard registers and
5950 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5955 case WITH_CLEANUP_EXPR
:
5956 case CLEANUP_POINT_EXPR
:
5957 /* Lowered by gimplify.c. */
5961 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5967 /* If we have an rtx, we do not need to scan our operands. */
5971 nops
= TREE_CODE_LENGTH (TREE_CODE (exp
));
5972 for (i
= 0; i
< nops
; i
++)
5973 if (TREE_OPERAND (exp
, i
) != 0
5974 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5977 /* If this is a language-specific tree code, it may require
5978 special handling. */
5979 if ((unsigned int) TREE_CODE (exp
)
5980 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5981 && !lang_hooks
.safe_from_p (x
, exp
))
5986 /* Should never get a type here. */
5990 /* If we have an rtl, find any enclosed object. Then see if we conflict
5994 if (GET_CODE (exp_rtl
) == SUBREG
)
5996 exp_rtl
= SUBREG_REG (exp_rtl
);
5998 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
6002 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6003 are memory and they conflict. */
6004 return ! (rtx_equal_p (x
, exp_rtl
)
6005 || (MEM_P (x
) && MEM_P (exp_rtl
)
6006 && true_dependence (exp_rtl
, VOIDmode
, x
,
6007 rtx_addr_varies_p
)));
6010 /* If we reach here, it is safe. */
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */
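/* For example, for a hypothetical index expression i * 12 + 8 this returns
   4: the MULT_EXPR contributes a factor of 4 (the largest power of two
   dividing 12), the constant 8 contributes 8, and the PLUS_EXPR takes the
   minimum of the two.  */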
6018 static unsigned HOST_WIDE_INT
6019 highest_pow2_factor (tree exp
)
6021 unsigned HOST_WIDE_INT c0
, c1
;
6023 switch (TREE_CODE (exp
))
6026 /* We can find the lowest bit that's a one. If the low
6027 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6028 We need to handle this case since we can find it in a COND_EXPR,
6029 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6030 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6032 if (TREE_CONSTANT_OVERFLOW (exp
))
6033 return BIGGEST_ALIGNMENT
;
6036 /* Note: tree_low_cst is intentionally not used here,
6037 we don't care about the upper bits. */
6038 c0
= TREE_INT_CST_LOW (exp
);
6040 return c0
? c0
: BIGGEST_ALIGNMENT
;
6044 case PLUS_EXPR
: case MINUS_EXPR
: case MIN_EXPR
: case MAX_EXPR
:
6045 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6046 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6047 return MIN (c0
, c1
);
6050 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6051 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6054 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6056 if (integer_pow2p (TREE_OPERAND (exp
, 1))
6057 && host_integerp (TREE_OPERAND (exp
, 1), 1))
6059 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6060 c1
= tree_low_cst (TREE_OPERAND (exp
, 1), 1);
6061 return MAX (1, c0
/ c1
);
6065 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6067 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6070 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
6073 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6074 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6075 return MIN (c0
, c1
);
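/* For example, for a tree representing (n * 8) + 12 the PLUS_EXPR case
   above yields MIN (highest_pow2_factor (n * 8), highest_pow2_factor (12))
   == MIN (8, 4) == 4, so a MEM addressed by that expression may be given
   4-byte alignment.  (Values chosen purely for illustration.)  */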
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
  else
    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
  return MAX (factor, target_align);
}
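/* For example, if EXP is only known to be a multiple of 4 but TARGET is a
   COMPONENT_REF whose FIELD_DECL is laid out with 8-byte alignment, the
   result is MAX (4, 8) == 8; the stricter of the two guarantees wins.
   (Values chosen purely for illustration.)  */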
/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
	/* Should be ignored.  */;
      else if (lang_hooks.expand_decl (var))
	/* OK.  */;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
	expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
	rest_of_decl_compilation (var, 0, 0);
      else
	/* No expansion needed.  */
	gcc_assert (TREE_CODE (var) == TYPE_DECL
		    || TREE_CODE (var) == CONST_DECL
		    || TREE_CODE (var) == FUNCTION_DECL
		    || TREE_CODE (var) == LABEL_DECL);
    }
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
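/* A typical caller is one of the binary-operator cases in
   expand_expr_real_1; roughly (a sketch of such a call, not code copied
   from any particular case):

       rtx op0, op1;
       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			subtarget, &op0, &op1, 0);
       return expand_binop (mode, this_optab, op0, op1, target,
			    unsignedp, OPTAB_LIB_WIDEN);

   Only operand 0 may be expanded into TARGET; operand 1 never is, which
   is why only EXP1 is checked against TARGET with safe_from_p above.  */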
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     at this point.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      || CONSTANT_CLASS_P (exp))
    return XEXP (output_constant_def (exp, 0), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);

    case CONST_DECL:
      /* Recurse and make the output_constant_def clause above handle this.  */
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
				      tmode, modifier);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Assume language
	 specific tree nodes can be expanded in some interesting way.  */
      if (DECL_P (exp)
	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */
	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
	    {
	      assemble_external (exp);
	      TREE_USED (exp) = 1;
	    }

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);

      result = convert_memory_address (tmode, result);
      tmp = convert_memory_address (tmode, tmp);

      if (modifier == EXPAND_SUM)
	result = gen_rtx_PLUS (tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
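/* As an illustration, for &s.f where F is a field at byte offset 4 (and
   BITS_PER_UNIT is 8), get_inner_reference returns S as INNER with
   BITPOS == 32 and no variable OFFSET; the recursive call produces the
   address of S, and the plus_constant call above then adds
   32 / BITS_PER_UNIT == 4 to it.  (Layout assumed for illustration;
   actual offsets depend on the target.)  */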
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address (tmode, result);

  return result;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */

static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);
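/* For example, with MODIFIER == EXPAND_SUM the address computation for
   a[i], with 4-byte elements, may come back as
   (plus (mult (reg i) (const_int 4)) (symbol_ref a)) instead of a pseudo
   holding the already-computed sum, so a caller that is building an
   address can fold the whole expression into one addressing mode.
   (The RTL shown is illustrative only.)  */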
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;

      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						REG_NOTES (insn));
	}
    }

  return ret;
}
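/* For instance, under -fnon-call-exceptions a libcall expanded inside EH
   region RN is not marked by expand_call, so the loop above attaches a
   REG_EH_REGION note with value RN to its CALL_INSN, keeping it
   associated with the correct handler.  (Illustrative scenario.)  */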
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  if (lang_hooks.reduce_bit_field_operations
      && TREE_CODE (type) == INTEGER_TYPE
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
    {
      /* An operation in what may be a bit-field type needs the
	 result to be reduced to the precision of the bit-field type,
	 which is narrower than that of the type's mode.  */
      reduce_bit_field = true;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
    }

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == COND_EXPR
		 || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
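  /* For instance, when the value of "(void) (x + 1)" is ignored and the
     expression has no side effects, the code below simply returns
     const0_rtx; a volatile load, by contrast, is still expanded and
     copied to a register so that the memory access is not lost.
     (Illustrative examples.)  */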
6510 if (! TREE_SIDE_EFFECTS (exp
))
6513 /* Ensure we reference a volatile object even if value is ignored, but
6514 don't do this if all we are doing is taking its address. */
6515 if (TREE_THIS_VOLATILE (exp
)
6516 && TREE_CODE (exp
) != FUNCTION_DECL
6517 && mode
!= VOIDmode
&& mode
!= BLKmode
6518 && modifier
!= EXPAND_CONST_ADDRESS
)
6520 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
6522 temp
= copy_to_reg (temp
);
6526 if (TREE_CODE_CLASS (code
) == tcc_unary
6527 || code
== COMPONENT_REF
|| code
== INDIRECT_REF
)
6528 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6531 else if (TREE_CODE_CLASS (code
) == tcc_binary
6532 || TREE_CODE_CLASS (code
) == tcc_comparison
6533 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6535 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6536 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6539 else if (code
== BIT_FIELD_REF
)
6541 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6542 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6543 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
      /* If we will do cse, generate all results into pseudo registers
	 since 1) that allows cse to find more things
	 and 2) otherwise cse could produce an insn the machine
	 cannot support.  An exception is a CONSTRUCTOR into a multi-word
	 MEM: that's much more likely to be most efficient into the MEM.
	 Another is a CALL_EXPR which must return in memory.  */
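      /* For example, on a 32-bit target a CONSTRUCTOR whose DImode value
	 needs two words is stored directly into its MEM rather than being
	 built in a pseudo first; that is the GET_MODE_SIZE (mode)
	 > UNITS_PER_WORD exclusion in the condition below.
	 (Illustrative.)  */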
6557 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6558 && (!REG_P (target
) || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
6559 && ! (code
== CONSTRUCTOR
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6560 && ! (code
== CALL_EXPR
&& aggregate_value_p (exp
, exp
)))
6567 tree function
= decl_function_context (exp
);
6569 temp
= label_rtx (exp
);
6570 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
6572 if (function
!= current_function_decl
6574 LABEL_REF_NONLOCAL_P (temp
) = 1;
6576 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
6581 return expand_expr_real_1 (SSA_NAME_VAR (exp
), target
, tmode
, modifier
,
6586 /* If a static var's type was incomplete when the decl was written,
6587 but the type is complete now, lay out the decl now. */
6588 if (DECL_SIZE (exp
) == 0
6589 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
6590 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6591 layout_decl (exp
, 0);
6593 /* ... fall through ... */
6597 gcc_assert (DECL_RTL (exp
));
6599 /* Ensure variable marked as used even if it doesn't go through
6600 a parser. If it hasn't be used yet, write out an external
6602 if (! TREE_USED (exp
))
6604 assemble_external (exp
);
6605 TREE_USED (exp
) = 1;
6608 /* Show we haven't gotten RTL for this yet. */
6611 /* Variables inherited from containing functions should have
6612 been lowered by this point. */
6613 context
= decl_function_context (exp
);
6614 gcc_assert (!context
6615 || context
== current_function_decl
6616 || TREE_STATIC (exp
)
6617 /* ??? C++ creates functions that are not TREE_STATIC. */
6618 || TREE_CODE (exp
) == FUNCTION_DECL
);
6620 /* This is the case of an array whose size is to be determined
6621 from its initializer, while the initializer is still being parsed.
6624 if (MEM_P (DECL_RTL (exp
))
6625 && REG_P (XEXP (DECL_RTL (exp
), 0)))
6626 temp
= validize_mem (DECL_RTL (exp
));
6628 /* If DECL_RTL is memory, we are in the normal case and either
6629 the address is not valid or it is not a register and -fforce-addr
6630 is specified, get the address into a register. */
6632 else if (MEM_P (DECL_RTL (exp
))
6633 && modifier
!= EXPAND_CONST_ADDRESS
6634 && modifier
!= EXPAND_SUM
6635 && modifier
!= EXPAND_INITIALIZER
6636 && (! memory_address_p (DECL_MODE (exp
),
6637 XEXP (DECL_RTL (exp
), 0))
6639 && !REG_P (XEXP (DECL_RTL (exp
), 0)))))
6642 *alt_rtl
= DECL_RTL (exp
);
6643 temp
= replace_equiv_address (DECL_RTL (exp
),
6644 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6647 /* If we got something, return it. But first, set the alignment
6648 if the address is a register. */
6651 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
6652 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6657 /* If the mode of DECL_RTL does not match that of the decl, it
6658 must be a promoted value. We return a SUBREG of the wanted mode,
6659 but mark it so that we know that it was already extended. */
6661 if (REG_P (DECL_RTL (exp
))
6662 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6664 enum machine_mode pmode
;
6666 /* Get the signedness used for this variable. Ensure we get the
6667 same mode we got when the variable was declared. */
6668 pmode
= promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6669 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0));
6670 gcc_assert (GET_MODE (DECL_RTL (exp
)) == pmode
);
6672 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6673 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6674 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6678 return DECL_RTL (exp
);
6681 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6682 TREE_INT_CST_HIGH (exp
), mode
);
6684 /* ??? If overflow is set, fold will have done an incomplete job,
6685 which can result in (plus xx (const_int 0)), which can get
6686 simplified by validate_replace_rtx during virtual register
6687 instantiation, which can result in unrecognizable insns.
6688 Avoid this by forcing all overflows into registers. */
6689 if (TREE_CONSTANT_OVERFLOW (exp
)
6690 && modifier
!= EXPAND_INITIALIZER
)
6691 temp
= force_reg (mode
, temp
);
6696 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp
))) == MODE_VECTOR_INT
6697 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp
))) == MODE_VECTOR_FLOAT
)
6698 return const_vector_from_tree (exp
);
6700 return expand_expr (build1 (CONSTRUCTOR
, TREE_TYPE (exp
),
6701 TREE_VECTOR_CST_ELTS (exp
)),
6702 ignore
? const0_rtx
: target
, tmode
, modifier
);
6705 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
6708 /* If optimized, generate immediate CONST_DOUBLE
6709 which will be turned into memory by reload if necessary.
6711 We used to force a register so that loop.c could see it. But
6712 this does not allow gen_* patterns to perform optimizations with
6713 the constants. It also produces two insns in cases like "x = 1.0;".
6714 On most machines, floating-point constants are not permitted in
6715 many insns, so we'd end up copying it to a register in any case.
6717 Now, we do the copying in expand_binop, if appropriate. */
6718 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6719 TYPE_MODE (TREE_TYPE (exp
)));
6722 /* Handle evaluating a complex constant in a CONCAT target. */
6723 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
6725 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
6728 rtarg
= XEXP (original_target
, 0);
6729 itarg
= XEXP (original_target
, 1);
6731 /* Move the real and imaginary parts separately. */
6732 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, 0);
6733 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, 0);
6736 emit_move_insn (rtarg
, op0
);
6738 emit_move_insn (itarg
, op1
);
6740 return original_target
;
6743 /* ... fall through ... */
6746 temp
= output_constant_def (exp
, 1);
6748 /* temp contains a constant address.
6749 On RISC machines where a constant address isn't valid,
6750 make some insns to get that address into a register. */
6751 if (modifier
!= EXPAND_CONST_ADDRESS
6752 && modifier
!= EXPAND_INITIALIZER
6753 && modifier
!= EXPAND_SUM
6754 && (! memory_address_p (mode
, XEXP (temp
, 0))
6755 || flag_force_addr
))
6756 return replace_equiv_address (temp
,
6757 copy_rtx (XEXP (temp
, 0)));
6762 tree val
= TREE_OPERAND (exp
, 0);
6763 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
);
6765 if (!SAVE_EXPR_RESOLVED_P (exp
))
6767 /* We can indeed still hit this case, typically via builtin
6768 expanders calling save_expr immediately before expanding
6769 something. Assume this means that we only have to deal
6770 with non-BLKmode values. */
6771 gcc_assert (GET_MODE (ret
) != BLKmode
);
6773 val
= build_decl (VAR_DECL
, NULL
, TREE_TYPE (exp
));
6774 DECL_ARTIFICIAL (val
) = 1;
6775 DECL_IGNORED_P (val
) = 1;
6776 TREE_OPERAND (exp
, 0) = val
;
6777 SAVE_EXPR_RESOLVED_P (exp
) = 1;
6779 if (!CONSTANT_P (ret
))
6780 ret
= copy_to_reg (ret
);
6781 SET_DECL_RTL (val
, ret
);
6788 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6789 expand_goto (TREE_OPERAND (exp
, 0));
6791 expand_computed_goto (TREE_OPERAND (exp
, 0));
6795 /* If we don't need the result, just ensure we evaluate any
6801 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6802 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
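      /* For example, a CONSTRUCTOR for a static aggregate such as
	 "static const int v[4] = {1, 2, 3, 4};" typically goes through
	 output_constant_def and is referenced as memory, while a small
	 non-static, non-BLKmode constructor is instead stored into
	 TARGET field by field by store_constructor further below.
	 (Illustrative example.)  */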
6820 else if ((TREE_STATIC (exp
)
6821 && ((mode
== BLKmode
6822 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6823 || TREE_ADDRESSABLE (exp
)
6824 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6825 && (! MOVE_BY_PIECES_P
6826 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6828 && ! mostly_zeros_p (exp
))))
6829 || ((modifier
== EXPAND_INITIALIZER
6830 || modifier
== EXPAND_CONST_ADDRESS
)
6831 && TREE_CONSTANT (exp
)))
6833 rtx constructor
= output_constant_def (exp
, 1);
6835 if (modifier
!= EXPAND_CONST_ADDRESS
6836 && modifier
!= EXPAND_INITIALIZER
6837 && modifier
!= EXPAND_SUM
)
6838 constructor
= validize_mem (constructor
);
6844 /* Handle calls that pass values in multiple non-contiguous
6845 locations. The Irix 6 ABI has examples of this. */
6846 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6847 || GET_CODE (target
) == PARALLEL
6848 || modifier
== EXPAND_STACK_PARM
)
6850 = assign_temp (build_qualified_type (type
,
6852 | (TREE_READONLY (exp
)
6853 * TYPE_QUAL_CONST
))),
6854 0, TREE_ADDRESSABLE (exp
), 1);
6856 store_constructor (exp
, target
, 0, int_expr_size (exp
));
6860 case MISALIGNED_INDIRECT_REF
:
6861 case ALIGN_INDIRECT_REF
:
6864 tree exp1
= TREE_OPERAND (exp
, 0);
6866 if (modifier
!= EXPAND_WRITE
)
6870 t
= fold_read_from_constant_string (exp
);
6872 return expand_expr (t
, target
, tmode
, modifier
);
6875 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6876 op0
= memory_address (mode
, op0
);
6878 if (code
== ALIGN_INDIRECT_REF
)
6880 int align
= TYPE_ALIGN_UNIT (type
);
6881 op0
= gen_rtx_AND (Pmode
, op0
, GEN_INT (-align
));
6882 op0
= memory_address (mode
, op0
);
6885 temp
= gen_rtx_MEM (mode
, op0
);
6887 set_mem_attributes (temp
, exp
, 0);
6889 /* Resolve the misalignment now, so that we don't have to remember
6890 to resolve it later. Of course, this only works for reads. */
6891 /* ??? When we get around to supporting writes, we'll have to handle
6892 this in store_expr directly. The vectorizer isn't generating
6893 those yet, however. */
6894 if (code
== MISALIGNED_INDIRECT_REF
)
6899 gcc_assert (modifier
== EXPAND_NORMAL
);
6901 /* The vectorizer should have already checked the mode. */
6902 icode
= movmisalign_optab
->handlers
[mode
].insn_code
;
6903 gcc_assert (icode
!= CODE_FOR_nothing
);
6905 /* We've already validated the memory, and we're creating a
6906 new pseudo destination. The predicates really can't fail. */
6907 reg
= gen_reg_rtx (mode
);
6909 /* Nor can the insn generator. */
6910 insn
= GEN_FCN (icode
) (reg
, temp
);
6919 case TARGET_MEM_REF
:
6921 struct mem_address addr
;
6923 get_address_description (exp
, &addr
);
6924 op0
= addr_for_mem_ref (&addr
, true);
6925 op0
= memory_address (mode
, op0
);
6926 temp
= gen_rtx_MEM (mode
, op0
);
6927 set_mem_attributes (temp
, TMR_ORIGINAL (exp
), 0);
6934 tree array
= TREE_OPERAND (exp
, 0);
6935 tree index
= TREE_OPERAND (exp
, 1);
6937 /* Fold an expression like: "foo"[2].
6938 This is not done in fold so it won't happen inside &.
6939 Don't fold if this is for wide characters since it's too
6940 difficult to do correctly and this is a very rare case. */
6942 if (modifier
!= EXPAND_CONST_ADDRESS
6943 && modifier
!= EXPAND_INITIALIZER
6944 && modifier
!= EXPAND_MEMORY
)
6946 tree t
= fold_read_from_constant_string (exp
);
6949 return expand_expr (t
, target
, tmode
, modifier
);
      /* If this is a constant index into a constant array,
	 just get the value from the array.  Handle both the cases when
	 we have an explicit constructor and when our operand is a variable
	 that was declared const.  */
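      /* E.g. when optimizing, for "static const char digits[] =
	 "0123456789";" a read such as digits[3] can be replaced right
	 here by the character constant '3' instead of emitting a load
	 from the array.  (Illustrative example.)  */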
6957 if (modifier
!= EXPAND_CONST_ADDRESS
6958 && modifier
!= EXPAND_INITIALIZER
6959 && modifier
!= EXPAND_MEMORY
6960 && TREE_CODE (array
) == CONSTRUCTOR
6961 && ! TREE_SIDE_EFFECTS (array
)
6962 && TREE_CODE (index
) == INTEGER_CST
)
6966 for (elem
= CONSTRUCTOR_ELTS (array
);
6967 (elem
&& !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6968 elem
= TREE_CHAIN (elem
))
6971 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6972 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
6976 else if (optimize
>= 1
6977 && modifier
!= EXPAND_CONST_ADDRESS
6978 && modifier
!= EXPAND_INITIALIZER
6979 && modifier
!= EXPAND_MEMORY
6980 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6981 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6982 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
6983 && targetm
.binds_local_p (array
))
6985 if (TREE_CODE (index
) == INTEGER_CST
)
6987 tree init
= DECL_INITIAL (array
);
6989 if (TREE_CODE (init
) == CONSTRUCTOR
)
6993 for (elem
= CONSTRUCTOR_ELTS (init
);
6995 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6996 elem
= TREE_CHAIN (elem
))
6999 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
7000 return expand_expr (fold (TREE_VALUE (elem
)), target
,
7003 else if (TREE_CODE (init
) == STRING_CST
7004 && 0 > compare_tree_int (index
,
7005 TREE_STRING_LENGTH (init
)))
7007 tree type
= TREE_TYPE (TREE_TYPE (init
));
7008 enum machine_mode mode
= TYPE_MODE (type
);
7010 if (GET_MODE_CLASS (mode
) == MODE_INT
7011 && GET_MODE_SIZE (mode
) == 1)
7012 return gen_int_mode (TREE_STRING_POINTER (init
)
7013 [TREE_INT_CST_LOW (index
)], mode
);
7018 goto normal_inner_ref
;
7021 /* If the operand is a CONSTRUCTOR, we can just extract the
7022 appropriate field if it is present. */
7023 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
7027 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
7028 elt
= TREE_CHAIN (elt
))
7029 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
7030 /* We can normally use the value of the field in the
7031 CONSTRUCTOR. However, if this is a bitfield in
7032 an integral mode that we can fit in a HOST_WIDE_INT,
7033 we must mask only the number of bits in the bitfield,
7034 since this is done implicitly by the constructor. If
7035 the bitfield does not meet either of those conditions,
7036 we can't do this optimization. */
7037 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7038 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
7040 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
7041 <= HOST_BITS_PER_WIDE_INT
))))
7043 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7044 && modifier
== EXPAND_STACK_PARM
)
7046 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
7047 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
7049 HOST_WIDE_INT bitsize
7050 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
7051 enum machine_mode imode
7052 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
7054 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
7056 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7057 op0
= expand_and (imode
, op0
, op1
, target
);
7062 = build_int_cst (NULL_TREE
,
7063 GET_MODE_BITSIZE (imode
) - bitsize
);
7065 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7067 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7075 goto normal_inner_ref
;
7078 case ARRAY_RANGE_REF
:
7081 enum machine_mode mode1
;
7082 HOST_WIDE_INT bitsize
, bitpos
;
7085 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7086 &mode1
, &unsignedp
, &volatilep
, true);
7089 /* If we got back the original object, something is wrong. Perhaps
7090 we are evaluating an expression too early. In any event, don't
7091 infinitely recurse. */
7092 gcc_assert (tem
!= exp
);
7094 /* If TEM's type is a union of variable size, pass TARGET to the inner
7095 computation, since it will need a temporary and TARGET is known
7096 to have to do. This occurs in unchecked conversion in Ada. */
7100 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7101 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7103 && modifier
!= EXPAND_STACK_PARM
7104 ? target
: NULL_RTX
),
7106 (modifier
== EXPAND_INITIALIZER
7107 || modifier
== EXPAND_CONST_ADDRESS
7108 || modifier
== EXPAND_STACK_PARM
)
7109 ? modifier
: EXPAND_NORMAL
);
7111 /* If this is a constant, put it into a register if it is a
7112 legitimate constant and OFFSET is 0 and memory if it isn't. */
7113 if (CONSTANT_P (op0
))
7115 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7116 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7118 op0
= force_reg (mode
, op0
);
7120 op0
= validize_mem (force_const_mem (mode
, op0
));
7123 /* Otherwise, if this object not in memory and we either have an
7124 offset or a BLKmode result, put it there. This case can't occur in
7125 C, but can in Ada if we have unchecked conversion of an expression
7126 from a scalar type to an array or record type or for an
7127 ARRAY_RANGE_REF whose type is BLKmode. */
7128 else if (!MEM_P (op0
)
7130 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
7132 tree nt
= build_qualified_type (TREE_TYPE (tem
),
7133 (TYPE_QUALS (TREE_TYPE (tem
))
7134 | TYPE_QUAL_CONST
));
7135 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7137 emit_move_insn (memloc
, op0
);
7143 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7146 gcc_assert (MEM_P (op0
));
7148 #ifdef POINTERS_EXTEND_UNSIGNED
7149 if (GET_MODE (offset_rtx
) != Pmode
)
7150 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7152 if (GET_MODE (offset_rtx
) != ptr_mode
)
7153 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7156 if (GET_MODE (op0
) == BLKmode
7157 /* A constant address in OP0 can have VOIDmode, we must
7158 not try to call force_reg in that case. */
7159 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7161 && (bitpos
% bitsize
) == 0
7162 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7163 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7165 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7169 op0
= offset_address (op0
, offset_rtx
,
7170 highest_pow2_factor (offset
));
7173 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7174 record its alignment as BIGGEST_ALIGNMENT. */
7175 if (MEM_P (op0
) && bitpos
== 0 && offset
!= 0
7176 && is_aligning_offset (offset
, tem
))
7177 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7179 /* Don't forget about volatility even if this is a bitfield. */
7180 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
7182 if (op0
== orig_op0
)
7183 op0
= copy_rtx (op0
);
7185 MEM_VOLATILE_P (op0
) = 1;
7188 /* The following code doesn't handle CONCAT.
7189 Assume only bitpos == 0 can be used for CONCAT, due to
7190 one element arrays having the same mode as its element. */
7191 if (GET_CODE (op0
) == CONCAT
)
7193 gcc_assert (bitpos
== 0
7194 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)));
	  /* In cases where an aligned union has an unaligned object
	     as a field, we might be extracting a BLKmode value from
	     an integer-mode (e.g., SImode) object.  Handle this case
	     by doing the extract into an object as wide as the field
	     (which we know to be the width of a basic mode), then
	     storing into memory, and changing the mode to BLKmode.  */
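	  /* Concretely: a 3-byte BLKmode field of a 4-byte-aligned union
	     lives inside an SImode object; it is fetched with
	     extract_bit_field into an integer-width temporary, stored to
	     a stack slot of the right type, and the slot's mode is then
	     switched to BLKmode so the caller sees an ordinary BLKmode
	     reference.  (Sizes chosen for illustration.)  */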
7204 if (mode1
== VOIDmode
7205 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
7206 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7207 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7208 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7209 && modifier
!= EXPAND_CONST_ADDRESS
7210 && modifier
!= EXPAND_INITIALIZER
)
7211 /* If the field isn't aligned enough to fetch as a memref,
7212 fetch it as a bit field. */
7213 || (mode1
!= BLKmode
7214 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7215 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
7217 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
7218 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
7219 && ((modifier
== EXPAND_CONST_ADDRESS
7220 || modifier
== EXPAND_INITIALIZER
)
7222 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7223 || (bitpos
% BITS_PER_UNIT
!= 0)))
7224 /* If the type and the field are a constant size and the
7225 size of the type isn't the same size as the bitfield,
7226 we must use bitfield operations. */
7228 && TYPE_SIZE (TREE_TYPE (exp
))
7229 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
7230 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7233 enum machine_mode ext_mode
= mode
;
7235 if (ext_mode
== BLKmode
7236 && ! (target
!= 0 && MEM_P (op0
)
7238 && bitpos
% BITS_PER_UNIT
== 0))
7239 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7241 if (ext_mode
== BLKmode
)
7244 target
= assign_temp (type
, 0, 1, 1);
7249 /* In this case, BITPOS must start at a byte boundary and
7250 TARGET, if specified, must be a MEM. */
7251 gcc_assert (MEM_P (op0
)
7252 && (!target
|| MEM_P (target
))
7253 && !(bitpos
% BITS_PER_UNIT
));
7255 emit_block_move (target
,
7256 adjust_address (op0
, VOIDmode
,
7257 bitpos
/ BITS_PER_UNIT
),
7258 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7260 (modifier
== EXPAND_STACK_PARM
7261 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7266 op0
= validize_mem (op0
);
7268 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
7269 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7271 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7272 (modifier
== EXPAND_STACK_PARM
7273 ? NULL_RTX
: target
),
7274 ext_mode
, ext_mode
);
	  /* If the result is a record type and BITSIZE is narrower than
	     the mode of OP0, an integral mode, and this is a big endian
	     machine, we must put the field into the high-order bits.  */
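	  /* E.g. on a 32-bit big-endian target, a 16-bit field extracted
	     into an SImode register lands in the low-order bits; the
	     shift below moves it up by 32 - 16 == 16 bits so the value is
	     positioned as it would be in memory.  (Widths chosen for
	     illustration.)  */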
7279 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7280 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7281 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7282 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7283 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7287 /* If the result type is BLKmode, store the data into a temporary
7288 of the appropriate type, but with the mode corresponding to the
7289 mode for the data we have (op0's mode). It's tempting to make
7290 this a constant type, since we know it's only being stored once,
7291 but that can cause problems if we are taking the address of this
7292 COMPONENT_REF because the MEM of any reference via that address
7293 will have flags corresponding to the type, which will not
7294 necessarily be constant. */
7295 if (mode
== BLKmode
)
7298 = assign_stack_temp_for_type
7299 (ext_mode
, GET_MODE_BITSIZE (ext_mode
), 0, type
);
7301 emit_move_insn (new, op0
);
7302 op0
= copy_rtx (new);
7303 PUT_MODE (op0
, BLKmode
);
7304 set_mem_attributes (op0
, exp
, 1);
7310 /* If the result is BLKmode, use that to access the object
7312 if (mode
== BLKmode
)
7315 /* Get a reference to just this component. */
7316 if (modifier
== EXPAND_CONST_ADDRESS
7317 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7318 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7320 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7322 if (op0
== orig_op0
)
7323 op0
= copy_rtx (op0
);
7325 set_mem_attributes (op0
, exp
, 0);
7326 if (REG_P (XEXP (op0
, 0)))
7327 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7329 MEM_VOLATILE_P (op0
) |= volatilep
;
7330 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7331 || modifier
== EXPAND_CONST_ADDRESS
7332 || modifier
== EXPAND_INITIALIZER
)
7334 else if (target
== 0)
7335 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7337 convert_move (target
, op0
, unsignedp
);
7342 return expand_expr (OBJ_TYPE_REF_EXPR (exp
), target
, tmode
, modifier
);
7345 /* Check for a built-in function. */
7346 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7347 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7349 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7351 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7352 == BUILT_IN_FRONTEND
)
7353 return lang_hooks
.expand_expr (exp
, original_target
,
7357 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7360 return expand_call (exp
, target
, ignore
);
7362 case NON_LVALUE_EXPR
:
7365 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7368 if (TREE_CODE (type
) == UNION_TYPE
)
7370 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7372 /* If both input and output are BLKmode, this conversion isn't doing
7373 anything except possibly changing memory attribute. */
7374 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7376 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7379 result
= copy_rtx (result
);
7380 set_mem_attributes (result
, exp
, 0);
7386 if (TYPE_MODE (type
) != BLKmode
)
7387 target
= gen_reg_rtx (TYPE_MODE (type
));
7389 target
= assign_temp (type
, 0, 1, 1);
7393 /* Store data into beginning of memory target. */
7394 store_expr (TREE_OPERAND (exp
, 0),
7395 adjust_address (target
, TYPE_MODE (valtype
), 0),
7396 modifier
== EXPAND_STACK_PARM
);
7400 gcc_assert (REG_P (target
));
7402 /* Store this field into a union of the proper type. */
7403 store_field (target
,
7404 MIN ((int_size_in_bytes (TREE_TYPE
7405 (TREE_OPERAND (exp
, 0)))
7407 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7408 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7412 /* Return the entire union. */
7416 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7418 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7421 /* If the signedness of the conversion differs and OP0 is
7422 a promoted SUBREG, clear that indication since we now
7423 have to do the proper extension. */
7424 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7425 && GET_CODE (op0
) == SUBREG
)
7426 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7428 return REDUCE_BIT_FIELD (op0
);
7431 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7432 if (GET_MODE (op0
) == mode
)
7435 /* If OP0 is a constant, just convert it into the proper mode. */
7436 else if (CONSTANT_P (op0
))
7438 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7439 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7441 if (modifier
== EXPAND_INITIALIZER
)
7442 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
7443 subreg_lowpart_offset (mode
,
7446 op0
= convert_modes (mode
, inner_mode
, op0
,
7447 TYPE_UNSIGNED (inner_type
));
7450 else if (modifier
== EXPAND_INITIALIZER
)
7451 op0
= gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7453 else if (target
== 0)
7454 op0
= convert_to_mode (mode
, op0
,
7455 TYPE_UNSIGNED (TREE_TYPE
7456 (TREE_OPERAND (exp
, 0))));
7459 convert_move (target
, op0
,
7460 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7464 return REDUCE_BIT_FIELD (op0
);
7466 case VIEW_CONVERT_EXPR
:
7467 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7469 /* If the input and output modes are both the same, we are done.
7470 Otherwise, if neither mode is BLKmode and both are integral and within
7471 a word, we can use gen_lowpart. If neither is true, make sure the
7472 operand is in memory and convert the MEM to the new mode. */
7473 if (TYPE_MODE (type
) == GET_MODE (op0
))
7475 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7476 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7477 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7478 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7479 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7480 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7481 else if (!MEM_P (op0
))
7483 /* If the operand is not a MEM, force it into memory. Since we
7484 are going to be be changing the mode of the MEM, don't call
7485 force_const_mem for constants because we don't allow pool
7486 constants to change mode. */
7487 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7489 gcc_assert (!TREE_ADDRESSABLE (exp
));
7491 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7493 = assign_stack_temp_for_type
7494 (TYPE_MODE (inner_type
),
7495 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7497 emit_move_insn (target
, op0
);
7501 /* At this point, OP0 is in the correct mode. If the output type is such
7502 that the operand is known to be aligned, indicate that it is.
7503 Otherwise, we need only be concerned about alignment for non-BLKmode
7507 op0
= copy_rtx (op0
);
7509 if (TYPE_ALIGN_OK (type
))
7510 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7511 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7512 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7514 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7515 HOST_WIDE_INT temp_size
7516 = MAX (int_size_in_bytes (inner_type
),
7517 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7518 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7519 temp_size
, 0, type
);
7520 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7522 gcc_assert (!TREE_ADDRESSABLE (exp
));
7524 if (GET_MODE (op0
) == BLKmode
)
7525 emit_block_move (new_with_op0_mode
, op0
,
7526 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7527 (modifier
== EXPAND_STACK_PARM
7528 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7530 emit_move_insn (new_with_op0_mode
, op0
);
7535 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */
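      /* For instance, a tree of the form (x + 4) + fp is rearranged here
	 into (fp + 4) + x, so that once the frame pointer is eliminated
	 the constant 4 can be folded into the replacement register's
	 offset instead of costing a separate add.  (Illustrative
	 expression.)  */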
7552 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7553 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7554 && TREE_CODE (TREE_OPERAND (exp
, 1)) == VAR_DECL
7555 && (DECL_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7556 || DECL_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7557 || DECL_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7559 tree t
= TREE_OPERAND (exp
, 1);
7561 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7562 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7565 /* If the result is to be ptr_mode and we are adding an integer to
7566 something, we might be forming a constant. So try to use
7567 plus_constant. If it produces a sum and we can't accept it,
7568 use force_operand. This allows P = &ARR[const] to generate
7569 efficient code on machines where a SYMBOL_REF is not a valid
7572 If this is an EXPAND_SUM call, always return the sum. */
7573 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7574 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7576 if (modifier
== EXPAND_STACK_PARM
)
7578 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7579 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7580 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7584 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7586 /* Use immed_double_const to ensure that the constant is
7587 truncated according to the mode of OP1, then sign extended
7588 to a HOST_WIDE_INT. Using the constant directly can result
7589 in non-canonical RTL in a 64x32 cross compile. */
7591 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7593 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7594 op1
= plus_constant (op1
, INTVAL (constant_part
));
7595 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7596 op1
= force_operand (op1
, target
);
7597 return REDUCE_BIT_FIELD (op1
);
7600 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7601 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7602 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7606 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7607 (modifier
== EXPAND_INITIALIZER
7608 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7609 if (! CONSTANT_P (op0
))
7611 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7612 VOIDmode
, modifier
);
7613 /* Return a PLUS if modifier says it's OK. */
7614 if (modifier
== EXPAND_SUM
7615 || modifier
== EXPAND_INITIALIZER
)
7616 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7619 /* Use immed_double_const to ensure that the constant is
7620 truncated according to the mode of OP1, then sign extended
7621 to a HOST_WIDE_INT. Using the constant directly can result
7622 in non-canonical RTL in a 64x32 cross compile. */
7624 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7626 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7627 op0
= plus_constant (op0
, INTVAL (constant_part
));
7628 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7629 op0
= force_operand (op0
, target
);
7630 return REDUCE_BIT_FIELD (op0
);
7634 /* No sense saving up arithmetic to be done
7635 if it's all in the wrong mode to form part of an address.
7636 And force_operand won't know whether to sign-extend or
7638 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7639 || mode
!= ptr_mode
)
7641 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7642 subtarget
, &op0
, &op1
, 0);
7643 if (op0
== const0_rtx
)
7645 if (op1
== const0_rtx
)
7650 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7651 subtarget
, &op0
, &op1
, modifier
);
7652 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
7655 /* For initializers, we are allowed to return a MINUS of two
7656 symbolic constants. Here we handle all cases when both operands
7658 /* Handle difference of two symbolic constants,
7659 for the sake of an initializer. */
7660 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7661 && really_constant_p (TREE_OPERAND (exp
, 0))
7662 && really_constant_p (TREE_OPERAND (exp
, 1)))
7664 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7665 NULL_RTX
, &op0
, &op1
, modifier
);
7667 /* If the last operand is a CONST_INT, use plus_constant of
7668 the negated constant. Else make the MINUS. */
7669 if (GET_CODE (op1
) == CONST_INT
)
7670 return REDUCE_BIT_FIELD (plus_constant (op0
, - INTVAL (op1
)));
7672 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode
, op0
, op1
));
7675 /* No sense saving up arithmetic to be done
7676 if it's all in the wrong mode to form part of an address.
7677 And force_operand won't know whether to sign-extend or
7679 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7680 || mode
!= ptr_mode
)
7683 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7684 subtarget
, &op0
, &op1
, modifier
);
7686 /* Convert A - const to A + (-const). */
7687 if (GET_CODE (op1
) == CONST_INT
)
7689 op1
= negate_rtx (mode
, op1
);
7690 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
7696 /* If first operand is constant, swap them.
7697 Thus the following special case checks need only
7698 check the second operand. */
7699 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7701 tree t1
= TREE_OPERAND (exp
, 0);
7702 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7703 TREE_OPERAND (exp
, 1) = t1
;
7706 /* Attempt to return something suitable for generating an
7707 indexed address, for machines that support that. */
7709 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7710 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7712 tree exp1
= TREE_OPERAND (exp
, 1);
7714 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7718 op0
= force_operand (op0
, NULL_RTX
);
7720 op0
= copy_to_mode_reg (mode
, op0
);
7722 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
7723 gen_int_mode (tree_low_cst (exp1
, 0),
7724 TYPE_MODE (TREE_TYPE (exp1
)))));
7727 if (modifier
== EXPAND_STACK_PARM
)
      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
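      /* For example, on a target providing a widening multiply pattern
	 (e.g. mulsidi3), the source "(long long) a * (long long) b" with
	 32-bit A and B is expanded with the widening-multiply optab on
	 the narrow operands directly, instead of extending both to
	 DImode and performing a full 64x64 multiply.  (Target capability
	 assumed for illustration.)  */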
7734 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7735 && TREE_CODE (type
) == INTEGER_TYPE
7736 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7737 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7738 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7739 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7740 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7741 /* Don't use a widening multiply if a shift will do. */
7742 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7743 > HOST_BITS_PER_WIDE_INT
)
7744 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7746 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7747 && (TYPE_PRECISION (TREE_TYPE
7748 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7749 == TYPE_PRECISION (TREE_TYPE
7751 (TREE_OPERAND (exp
, 0), 0))))
7752 /* If both operands are extended, they must either both
7753 be zero-extended or both be sign-extended. */
7754 && (TYPE_UNSIGNED (TREE_TYPE
7755 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7756 == TYPE_UNSIGNED (TREE_TYPE
7758 (TREE_OPERAND (exp
, 0), 0)))))))
7760 tree op0type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0));
7761 enum machine_mode innermode
= TYPE_MODE (op0type
);
7762 bool zextend_p
= TYPE_UNSIGNED (op0type
);
7763 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
7764 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
7766 if (mode
== GET_MODE_2XWIDER_MODE (innermode
))
7768 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7770 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7771 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7772 TREE_OPERAND (exp
, 1),
7773 NULL_RTX
, &op0
, &op1
, 0);
7775 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7776 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7777 NULL_RTX
, &op0
, &op1
, 0);
7780 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7781 && innermode
== word_mode
)
7784 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7785 NULL_RTX
, VOIDmode
, 0);
7786 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7787 op1
= convert_modes (innermode
, mode
,
7788 expand_expr (TREE_OPERAND (exp
, 1),
7789 NULL_RTX
, VOIDmode
, 0),
7792 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7793 NULL_RTX
, VOIDmode
, 0);
7794 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7795 unsignedp
, OPTAB_LIB_WIDEN
);
7796 hipart
= gen_highpart (innermode
, temp
);
7797 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
7801 emit_move_insn (hipart
, htem
);
7802 return REDUCE_BIT_FIELD (temp
);
7806 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7807 subtarget
, &op0
, &op1
, 0);
7808 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
7810 case TRUNC_DIV_EXPR
:
7811 case FLOOR_DIV_EXPR
:
7813 case ROUND_DIV_EXPR
:
7814 case EXACT_DIV_EXPR
:
7815 if (modifier
== EXPAND_STACK_PARM
)
7817 /* Possible optimization: compute the dividend with EXPAND_SUM
7818 then if the divisor is constant can optimize the case
7819 where some terms of the dividend have coeffs divisible by it. */
7820 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7821 subtarget
, &op0
, &op1
, 0);
7822 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7827 case TRUNC_MOD_EXPR
:
7828 case FLOOR_MOD_EXPR
:
7830 case ROUND_MOD_EXPR
:
7831 if (modifier
== EXPAND_STACK_PARM
)
7833 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7834 subtarget
, &op0
, &op1
, 0);
7835 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7837 case FIX_ROUND_EXPR
:
7838 case FIX_FLOOR_EXPR
:
7840 gcc_unreachable (); /* Not used for C. */
7842 case FIX_TRUNC_EXPR
:
7843 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7844 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7845 target
= gen_reg_rtx (mode
);
7846 expand_fix (target
, op0
, unsignedp
);
7850 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7851 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7852 target
= gen_reg_rtx (mode
);
7853 /* expand_float can't figure out what to do if FROM has VOIDmode.
7854 So give it the correct mode. With -O, cse will optimize this. */
7855 if (GET_MODE (op0
) == VOIDmode
)
7856 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7858 expand_float (target
, op0
,
7859 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7863 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7864 if (modifier
== EXPAND_STACK_PARM
)
7866 temp
= expand_unop (mode
,
7867 optab_for_tree_code (NEGATE_EXPR
, type
),
7870 return REDUCE_BIT_FIELD (temp
);
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MIN_EXPR:
    case MAX_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
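      /* For MAX_EXPR the branching fallback below emits roughly:
	 target = op0; if (target >= op1) goto lab; target = op1; lab:
	 (MIN_EXPR uses <= instead).  */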
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

#ifdef HAVE_conditional_move
      /* Use a conditional move if possible.  */
      if (can_conditionally_move_p (mode))
	{
	  enum rtx_code comparison_code;
	  rtx insn;

	  if (code == MAX_EXPR)
	    comparison_code = unsignedp ? GEU : GE;
	  else
	    comparison_code = unsignedp ? LEU : LE;

	  /* ??? Same problem as in expmed.c: emit_conditional_move
	     forces a stack adjustment via compare_from_rtx, and we
	     lose the stack adjustment if the sequence we are about
	     to create is discarded.  */
	  do_pending_stack_adjust ();

	  start_sequence ();

	  /* Try to emit the conditional move.  */
	  insn = emit_conditional_move (target, comparison_code,
					op0, op1, mode,
					op0, op1, mode,
					unsignedp);

	  /* If we could do the conditional move, emit the sequence,
	     and return.  */
	  if (insn)
	    {
	      rtx seq = get_insns ();
	      end_sequence ();
	      emit_insn (seq);
	      return target;
	    }

	  /* Otherwise discard the sequence and fall back to code with
	     a branch.  */
	  end_sequence ();
	}
#endif
      emit_move_insn (target, op0);

      temp = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
					  NULL_RTX, temp);
	  else
	    do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
					  NULL_RTX, temp);
	}
      else
	do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);

      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
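      /* For example, with no side effects "a && b" (TRUTH_AND_EXPR) is
	 expanded as the bitwise AND of two zero-or-one values, whereas
	 TRUTH_ANDIF_EXPR would branch around the evaluation of "b" when
	 "a" is false.  */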
    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case UNORDERED_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);
	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;
	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
      /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */
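      /* The fallback below emits roughly:  target = 0;
	 if (!exp) goto lab; target = 1; lab:  so TARGET ends up holding
	 the zero-or-one value of the comparison.  */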
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;
    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */
      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
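      /* The expansion below emits roughly:  if (!cond) goto lab0;
	 temp = arm1; goto lab1; lab0: temp = arm2; lab1:  with both
	 arms stored into the same temporary.  */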
      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);
      do_pending_stack_adjust ();
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_jump_insn (gen_jump (op1));
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_label (op1);
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;
    case MODIFY_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   handle it.  */
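	/* For example, for one-bit bitfields "x.a |= x.b" can be emitted as
	   "if (x.b) x.a = 1;" and "x.a &= x.b" as "if (!x.b) x.a = 0;",
	   avoiding a read-modify-write of the destination bitfield.  */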
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs);
	return const0_rtx;
      }
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return read_complex_part (op0, true);
    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case CASE_LABEL_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);
    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
	this_optab = optab_for_tree_code (code, type);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
  this_optab = optab_for_tree_code (code, type);
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */
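/* For example, reducing to an unsigned 3-bit precision masks with 7,
   while the signed case shifts left by (GET_MODE_BITSIZE - 3) and then
   arithmetic-shifts back down so the value is properly sign-extended.  */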
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */
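/* The offset recognized here has the shape of an aligning expression such
   as (-(HOST_WIDE_INT) &exp) & (alignment - 1): a BIT_AND_EXPR whose first
   operand is the negated address of EXP (possibly behind conversions) and
   whose second operand is an (alignment - 1) mask larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */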
static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
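/* For example, for the argument "hello" + 2 this returns the STRING_CST
   for "hello" and sets *PTR_OFFSET to a sizetype constant 2.  */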
tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* If they are read-only, non-volatile and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
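/* The set/jump/set fallback emitted at the end of this function has the
   shape:  target = 1; if (op0 <cond> op1) goto lab; target = 0; lab:
   (with the two constants swapped when the result must be inverted).  */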
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
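  /* For example, a signed "x < 1" becomes "x <= 0" and a signed "x > -1"
     becomes "x >= 0", so the special zero-comparison cases below apply.  */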
  switch (TREE_CODE (exp))
    {
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
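  /* For example, "(x & 8) != 0" is rewritten into the equivalent of
     "(x >> 3) & 1", and "(x & 8) == 0" additionally XORs that result
     with 1.  */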
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
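  /* For example, for a switch covering case values 5 through 10 the caller
     passes INDEX - 5 and RANGE = 5; any original value below 5 wraps around
     to a huge unsigned number, so the single GTU comparison below rejects
     values on both sides of the table.  */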
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"