/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
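
/* Illustrative note (an addition, not from the original source): on a
   target that has QImode, HImode and SImode moves, copying SIZE == 7
   bytes at ALIGN >= 32 bits costs one SImode, one HImode and one QImode
   move, so move_by_pieces_ninsns returns 3; MOVE_BY_PIECES_P then
   compares that count against MOVE_RATIO to decide between inline
   piecewise moves and a block-move sequence.  */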
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
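
/* Usage sketch (an addition, not from the original source): widening a
   QImode value X to SImode with zero-extension could be written as

     rtx wide = convert_to_mode (SImode, x, 1);

   which either refers to a part of X in place or copies it into a fresh
   SImode pseudo via convert_move, as documented above.  */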
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
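
/* Worked example (an addition, not from the original source): converting
   the QImode constant 0xff to HImode goes through the CONST_INT path
   above.  VAL starts as -1 on the host; "val &= ((HOST_WIDE_INT) 1 << 8) - 1"
   zero-extends it to 255, and only if UNSIGNEDP is zero does the following
   "val |= (HOST_WIDE_INT) (-1) << 8" sign-extend it back to -1.  So the
   unsigned conversion yields (const_int 255) and the signed one
   (const_int -1), as gen_int_mode requires.  */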
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
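
/* For example (an addition, not from the original source), on a host
   with a 64-bit HOST_WIDE_INT the second operand of the MIN is 16, so a
   target whose MOVE_MAX_PIECES is 8 ends up with STORE_MAX_PIECES of 8
   bytes per constant store.  */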
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
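
/* Worked example (an addition, not from the original source): with
   L == 7, ALIGN >= 32 and MAX_SIZE == 5, the widest usable mode is
   SImode, so the loop accumulates 7/4 = 1 SImode move (L becomes 3),
   then 3/2 = 1 HImode move (L becomes 1), then 1 QImode move, for a
   total of 3 insns, assuming each mode has a move pattern and meets
   the alignment test.  */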
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
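
/* Usage sketch (an addition, not from the original source): a typical
   caller with two BLKmode MEMs and a constant length might write

     emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   and the code above falls back from piecewise moves, to a movmem
   pattern, to a memcpy libcall, to an explicit byte loop, in that
   order.  */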
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
1488 The number of registers to be filled is NREGS. */
1491 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1494 #ifdef HAVE_load_multiple
1502 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1503 x
= validize_mem (force_const_mem (mode
, x
));
1505 /* See if the machine can do this with a load multiple insn. */
1506 #ifdef HAVE_load_multiple
1507 if (HAVE_load_multiple
)
1509 last
= get_last_insn ();
1510 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1518 delete_insns_since (last
);
1522 for (i
= 0; i
< nregs
; i
++)
1523 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1524 operand_subword_force (x
, i
, mode
));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
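
/* Illustrative note (an addition, not from the original source): a group
   of registers such as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   describes a 16-byte value whose first eight bytes live in register 3
   and whose second eight bytes live in register 4; gen_group_rtx clones
   this shape with fresh pseudos of the same modes.  */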
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
1851 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1852 where SRC is non-consecutive registers represented by a PARALLEL.
1853 SSIZE represents the total size of block ORIG_DST, or -1 if not
1857 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1860 int start
, finish
, i
;
1861 enum machine_mode m
= GET_MODE (orig_dst
);
1863 gcc_assert (GET_CODE (src
) == PARALLEL
);
1865 if (!SCALAR_INT_MODE_P (m
)
1866 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
1868 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_dst
));
1869 if (imode
== BLKmode
)
1870 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
, 0);
1872 dst
= gen_reg_rtx (imode
);
1873 emit_group_store (dst
, src
, type
, ssize
);
1874 if (imode
!= BLKmode
)
1875 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
1876 emit_move_insn (orig_dst
, dst
);
1880 /* Check for a NULL entry, used to indicate that the parameter goes
1881 both on the stack and in registers. */
1882 if (XEXP (XVECEXP (src
, 0, 0), 0))
1886 finish
= XVECLEN (src
, 0);
1888 tmps
= alloca (sizeof (rtx
) * finish
);
1890 /* Copy the (probable) hard regs into pseudos. */
1891 for (i
= start
; i
< finish
; i
++)
1893 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1894 if (!REG_P (reg
) || REGNO (reg
) < FIRST_PSEUDO_REGISTER
)
1896 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1897 emit_move_insn (tmps
[i
], reg
);
  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (outer, inner);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              emit_move_insn (dst, temp);
              done = true;
              start++;
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (outer, inner);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              emit_move_insn (dst, temp);
              done = true;
              finish--;
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
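
/* For reference (an illustrative sketch, not part of the original source):
   SRC above is a PARALLEL of (expr_list (reg) (const_int offset)) pairs,
   e.g. a value returned in two hard registers:

     (parallel [(expr_list (reg:SI 4) (const_int 0))
                (expr_list (reg:SI 5) (const_int 4))])

   and emit_group_store spills each piece to its byte offset within DST.  */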
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
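
/* Usage sketch (illustrative, not from the original source): expanding a
   call to a function that returns a small BLKmode struct in a register,
   where RETURN_REGNO is a hypothetical target register number:

     rtx hard_ret = gen_rtx_REG (word_mode, RETURN_REGNO);
     rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_ret, type);

   BLK is then a stack temporary holding the struct's bytes, with any
   left padding skipped via PADDING_CORRECTION.  */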
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
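
/* Usage sketch (illustrative): a call expander passing one argument in
   hard registers 4 and 5 records the uses like so:

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 4, 2);

   CALL_FUSAGE then heads an EXPR_LIST of (use (reg 4)) (use (reg 5))
   entries suitable for attaching to the CALL_INSN.  */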
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
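
/* Sketch of a typical CONSTFUN callback (illustrative and simplified;
   modeled on the way builtins.c feeds string constants to
   store_by_pieces -- c_readstr is a real helper, the rest is a sketch):

     static rtx
     example_read_str (void *data, HOST_WIDE_INT offset,
                       enum machine_mode mode)
     {
       const char *str = (const char *) data;
       return c_readstr (str + offset, mode);
     }

   Callers are expected to check can_store_by_pieces first and only then
   commit, since store_by_pieces asserts STORE_BY_PIECES_P.  */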
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (GET_CODE (size) == CONST_INT
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align))
    ;
  else
    return clear_storage_via_libcall (object, size,
                                      method == BLOCK_OP_TAILCALL);

  return NULL;
}
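
/* Usage sketch (illustrative): zeroing a 32-byte BLKmode temporary.

     rtx mem = assign_stack_temp (BLKmode, 32, 0);
     clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);

   Depending on size, alignment and the target, this becomes piecewise
   stores, a setmem insn, or the memset libcall below.  */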
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  size_tree = make_tree (sizetype, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
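
/* Illustrative note (not from the original source): a port or front end
   that renames the block-clear routine could call, e.g.,

     init_block_clear_fn ("__gcc_memset");   // hypothetical asm name

   so that the libcall emitted above is assembled under that name via
   set_user_assembler_name.  */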
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = setmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx opsize, opchar;
          enum machine_mode char_mode;
          rtx last = get_last_insn ();
          rtx pat;

          opsize = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (opsize, mode))
            opsize = copy_to_mode_reg (mode, opsize);

          opchar = val;
          char_mode = insn_data[(int) code].operand[2].mode;
          if (char_mode != VOIDmode)
            {
              opchar = convert_to_mode (char_mode, opchar, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (opchar, char_mode))
                opchar = copy_to_mode_reg (char_mode, opchar);
            }

          pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
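
/* For reference, the operand layout a setmemM pattern is matched against
   here (this mirrors the GEN_FCN call above):
     operand 0: the destination BLKmode MEM
     operand 1: the byte count, in integer mode M
     operand 2: the fill value
     operand 3: the known alignment in bytes  */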
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode);
}
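
/* Usage sketch (illustrative): picking apart an SCmode value C.

     rtx re = read_complex_part (c, false);   // SFmode real part
     rtx im = read_complex_part (c, true);    // SFmode imag part

   For a CONCAT this is a direct XEXP; for suitable registers a subreg;
   only sub-word MEMs fall back to extract_bit_field.  */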
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
                       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = mov_optab->handlers[imode].insn_code;
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

static rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              GEN_INT (adjust), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
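
/* Example of the rewrite (illustrative, assuming STACK_GROWS_DOWNWARD and
   no PUSH_ROUNDING adjustment for SImode): a push destination

     (mem:SI (pre_dec (reg sp)))

   becomes an explicit "sp := sp - 4" insn followed by the plain
   destination (mem:SI (reg sp)).  */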
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

static rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);
  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = mov_optab->handlers[CCmode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = mov_optab->handlers[mode].insn_code;
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, false);
      if (ret)
        return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (MEM_P (y)
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
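
/* Usage sketch (illustrative): the canonical way to copy one pseudo into
   another from an expander:

     rtx src = gen_reg_rtx (DImode);
     rtx dst = gen_reg_rtx (DImode);
     emit_move_insn (dst, src);

   On a target with no movdi pattern this bottoms out in
   emit_move_multi_word, emitting one word_mode move per word.  */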
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (LEGITIMATE_CONSTANT_P (y))
    oldcost = rtx_cost (y, SET);
  else
    oldcost = rtx_cost (force_const_mem (dstmode, y), SET);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
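
/* Usage sketch (illustrative): making room for a 64-byte argument block
   with 8 bytes of padding at low addresses:

     rtx addr = push_block (GEN_INT (64), 8, 1);
     rtx blk = gen_rtx_MEM (BLKmode, addr);

   ADDR may simply be virtual_outgoing_args_rtx when the stack grows
   downward.  */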
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    GEN_INT (rounded_size),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, int extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (dest, align);

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        {
          gcc_assert (partial % UNITS_PER_WORD == 0);
          move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
        }
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
          || x == 0
           /* Only registers can be subtargets.  */
           || !REG_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
          ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
                                 unsigned HOST_WIDE_INT bitpos,
                                 enum machine_mode mode1, rtx str_rtx,
                                 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (!BINARY_CLASS_P (src)
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);
  STRIP_NOPS (op0);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
        str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
                                MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
        return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (TREE_CODE (src))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
         where we don't need to do any masking and also
         1 bit bitfields where xor can be used.
         We might win by one instruction for the other bitfields
         too if insv/extv instructions aren't used, so that
         can be added later.  */
      if (bitpos + bitsize != str_bitsize
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
        break;

      value = expand_expr (op1, NULL_RTX, str_mode, 0);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
        {
          value = expand_and (str_mode, value, const1_rtx, NULL);
          binop = xor_optab;
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
      value = convert_modes (GET_MODE (str_rtx),
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
        {
          rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
                              - 1);
          value = expand_and (GET_MODE (str_rtx), value, mask,
                              NULL_RTX);
        }
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
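
/* The kind of source this optimization targets (illustrative; exact
   triggering depends on endianness and field layout):

     struct { unsigned lo : 1; unsigned hi : 31; } s;
     s.hi += 1;     // field reaches the MSB: no masking needed
     s.lo ^= 1;     // 1-bit field: a single XOR on the word

   On a little-endian target both compile to one read-modify-write on the
   containing word instead of an extract/insert sequence.  */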
/* Expand an assignment that stores the value of FROM into TO.  */

void
expand_assignment (tree to, tree from)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_normal (from);
      return;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
  if (handled_component_p (to)
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, true);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      to_rtx = expand_normal (tem);

      if (offset != 0)
	{
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (offset_rtx) != Pmode)
	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	  if (GET_MODE (offset_rtx) != ptr_mode)
	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (MEM_P (to_rtx)
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* Handle expand_expr of a complex value returning a CONCAT.  */
      if (GET_CODE (to_rtx) == CONCAT)
	{
	  if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
	    {
	      gcc_assert (bitpos == 0);
	      result = store_expr (from, to_rtx, false);
	    }
	  else
	    {
	      gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
	      result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);

	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);

	      /* Deal with volatile and readonly fields.  The former is only
		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	      if (component_uses_parent_alias_set (to))
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
					       to_rtx, to, from))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos, mode1, from,
				  TREE_TYPE (tem), get_alias_set (to));
	}

      if (result)
	preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign-extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && REG_P (DECL_RTL (to))))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address (GET_MODE (to_rtx), value);
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TYPE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return;
}
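/* Illustrative note (not compiler code): the CALL_EXPR special case above
   matters for code such as

     #include <setjmp.h>
     jmp_buf buf;
     int val;
     void f (void) { val = setjmp (buf); }

   where the call must be expanded before any part of VAL's address is
   loaded, since a second return from setjmp would otherwise see a
   half-computed lhs.  The names BUF and VAL are hypothetical.  */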
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */

rtx
store_expr (tree exp, rtx target, int call_param_p)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  int dont_return_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && (!lang_hooks.reduce_bit_field_operations
	      || (GET_MODE_PRECISION (GET_MODE (target))
		  == TYPE_PRECISION (TREE_TYPE (exp)))))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      (lang_hooks.types.signed_or_unsigned_type
	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

	  exp = convert (lang_hooks.types.type_for_mode
			 (GET_MODE (SUBREG_REG (target)),
			  SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && REG_P (target)
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && CONSTANT_P (temp))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, TREE_TYPE (exp),
			 int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
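/* Illustrative note (not compiler code): the STRING_CST path above handles
   initializations such as

     char buf[8] = "hi";

   by block-copying the three bytes of the string constant (including the
   terminating NUL) and then clearing the remaining five bytes with
   clear_storage.  The array size 8 is just an example.  */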
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields are set to non-constant values,
     and place it in *P_NC_ELTS; and
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * if a type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *p_must_clear.  */

static void
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_nc_elts,
			    HOST_WIDE_INT *p_elt_count,
			    bool *p_must_clear)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, nc_elts, elt_count;
  tree value, purpose;

  nz_elts = 0;
  nc_elts = 0;
  elt_count = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
	    mult = (tree_low_cst (hi_index, 1)
		    - tree_low_cst (lo_index, 1) + 1);
	}

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
	    categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
	    nz_elts += mult * nz;
	    nc_elts += mult * nc;
	    elt_count += mult * ic;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  elt_count += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case VECTOR_CST:
	  {
	    tree v;
	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
	      {
		if (!initializer_zerop (TREE_VALUE (v)))
		  nz_elts += mult;
		elt_count += mult;
	      }
	  }
	  break;

	default:
	  nz_elts += mult;
	  elt_count += mult;
	  if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
	    nc_elts += mult;
	  break;
	}
    }

  if (!*p_must_clear
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
    {
      tree init_sub_type;
      bool clear_this = true;

      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
	{
	  /* We don't expect more than one element of the union to be
	     initialized.  Not sure what we should do otherwise... */
	  gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
		      == 1);

	  init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
						CONSTRUCTOR_ELTS (ctor),
						0)->value);

	  /* ??? We could look at each element of the union, and find the
	     largest element.  Which would avoid comparing the size of the
	     initialized element against any tail padding in the union.
	     Doesn't seem worth the effort...  */
	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
				TYPE_SIZE (init_sub_type)) == 1)
	    {
	      /* And now we have to find out if the element itself is fully
		 constructed.  E.g. for union { struct { int a, b; } s; } u
		 = { .s = { .a = 1 } }.  */
	      if (elt_count == count_type_elements (init_sub_type, false))
		clear_this = false;
	    }
	}

      *p_must_clear = clear_this;
    }

  *p_nz_elts += nz_elts;
  *p_nc_elts += nc_elts;
  *p_elt_count += elt_count;
}

void
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_nc_elts,
			  HOST_WIDE_INT *p_elt_count,
			  bool *p_must_clear)
{
  *p_nz_elts = 0;
  *p_nc_elts = 0;
  *p_elt_count = 0;
  *p_must_clear = false;
  categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
			      p_must_clear);
}
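/* Illustrative note (not compiler code): for a constructor such as

     int a[4] = { 1, 0, 0, g () };

   categorize_ctor_elements would report roughly two nonzero elements
   (the 1 and the call), one non-constant element (the call), and a total
   element count of four; callers use these numbers to decide whether to
   pre-clear the object.  The function g is hypothetical.  */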
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
   array member at the end of the structure.  */

HOST_WIDE_INT
count_type_elements (tree type, bool allow_flexarr)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree telts = array_type_nelts (type);
	if (telts && host_integerp (telts, 1))
	  {
	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
	    if (n == 0)
	      return 0;
	    else if (max / n > m)
	      return n * m;
	  }
	return -1;
      }

    case RECORD_TYPE:
      {
	HOST_WIDE_INT n = 0, t;
	tree f;

	for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      t = count_type_elements (TREE_TYPE (f), false);
	      if (t < 0)
		{
		  /* Check for structures with flexible array member.  */
		  tree tf = TREE_TYPE (f);
		  if (allow_flexarr
		      && TREE_CHAIN (f) == NULL
		      && TREE_CODE (tf) == ARRAY_TYPE
		      && TYPE_DOMAIN (tf)
		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
		      && int_size_in_bytes (type) >= 0)
		    break;

		  return -1;
		}
	      n += t;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	/* Ho hum.  How in the world do we guess here?  Clearly it isn't
	   right to count the fields.  Guess based on the number of words.  */
	HOST_WIDE_INT n = int_size_in_bytes (type);
	if (n < 0)
	  return -1;
	return n / UNITS_PER_WORD;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
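/* Illustrative note (not compiler code): with ALLOW_FLEXARR true, a
   structure like

     struct msg { int len; char data[]; };

   counts only the LEN field; the trailing flexible array member is
   skipped rather than making the whole type count as variable-sized.
   The struct layout is hypothetical.  */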
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
      if (must_clear)
	return 1;

      elts = count_type_elements (TREE_TYPE (exp), false);

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}

/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, count;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
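/* Illustrative note (not compiler code): an initializer such as

     int a[8] = { [0] = 5 };

   is "mostly zeros" (one nonzero element out of eight), so callers will
   typically clear the whole object first and then store only the single
   nonzero element.  */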
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
}
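/* Illustrative note (not compiler code): for a nested initializer such as

     struct p { int x, y; };
     struct q { struct p a; int b; } v = { { 0, 0 }, 1 };

   the inner CONSTRUCTOR for V.A can be handed straight back to
   store_constructor with CLEARED still set, so an already-zeroed
   substructure is not cleared a second time.  The types are hypothetical.  */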
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
		      != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (! cleared)
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (host_integerp (DECL_SIZE (field), 1))
	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
	    else
	      bitsize = -1;

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (host_integerp (offset, 0)
		&& host_integerp (bit_position (field), 0))
	      {
		bitpos = int_bit_position (field);
		offset = 0;
	      }
	    else
	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	    if (offset)
	      {
		rtx offset_rtx;

		offset
		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						    make_tree (TREE_TYPE (exp),
							       target));

		offset_rtx = expand_normal (offset);
		gcc_assert (MEM_P (to_rtx));

#ifdef POINTERS_EXTEND_UNSIGNED
		if (GET_MODE (offset_rtx) != Pmode)
		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
		if (GET_MODE (offset_rtx) != ptr_mode)
		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

		to_rtx = offset_address (to_rtx, offset_rtx,
					 highest_pow2_factor (offset));
	      }

#ifdef WORD_REGISTER_OPERATIONS
	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_size
		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
		    value = convert (type, value);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (NULL_TREE,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }
#endif

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
				     value, type, cleared,
				     get_alias_set (TREE_TYPE (field)));
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
			  && host_integerp (TYPE_MAX_VALUE (domain), 0));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! host_integerp (lo_index, 1)
			|| ! host_integerp (hi_index, 1))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_low_cst (hi_index, 1)
				       - tree_low_cst (lo_index, 1) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    int unsignedp;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    unsignedp = TYPE_UNSIGNED (elttype);
	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && host_integerp (lo_index, 0)
		    && host_integerp (hi_index, 0)
		    && (lo = tree_low_cst (lo_index, 0),
			hi = tree_low_cst (hi_index, 0),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (host_integerp (TYPE_SIZE (elttype), 1)
			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, type, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx loop_start = gen_label_rtx ();
		    rtx loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);
		    unsignedp = TYPE_UNSIGNED (domain);

		    index = build_decl (VAR_DECL, NULL_TREE, domain);

		    index_r
		      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						   &unsignedp, 0));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position
		      = convert (ssizetype,
				 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
					      index, TYPE_MIN_VALUE (domain)));
		    position = size_binop (MULT_EXPR, position,
					   convert (ssizetype,
						    TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node));

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! host_integerp (index, 0))
		     || ! host_integerp (TYPE_SIZE (elttype), 1))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position = size_binop (MULT_EXPR, index,
				       convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_low_cst (index, 0) - minelt)
			    * tree_low_cst (TYPE_SIZE (elttype), 1));
		else
		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 type, cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = 0;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) vec_init_optab->handlers[mode].insn_code;
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_low_cst
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype), 0), 1);

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_low_cst (ce->index, 1);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* Vector CONSTRUCTORs should only be built from smaller
		   vectors in the case of BLKmode vectors.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos,
					 value_mode, value, type,
					 cleared, get_alias_set (elttype));
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
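/* Illustrative note (not compiler code): a GNU C range initializer such as

     int a[100] = { [0 ... 99] = n };

   reaches the RANGE_EXPR handling above: a small constant range is
   unrolled into individual stores, while a large one is expanded as a
   runtime loop over an index register.  The bound 100 is arbitrary.  */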
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     enum machine_mode mode, tree exp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      if (TREE_CODE (exp) == NOP_EXPR)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = TREE_OPERAND (exp, 0);
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && !(bitpos % BITS_PER_UNIT));

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0);
    }
}
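/* Illustrative note (not compiler code): a store into a misaligned member
   of a packed structure, e.g.

     struct __attribute__ ((packed)) s { char c; int i; };
     void g (struct s *p, int v) { p->i = v; }

   fails the alignment tests above on a strict-alignment target and is
   therefore routed through store_bit_field rather than an ordinary
   memory reference.  The struct is hypothetical.  */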
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
				     DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   bitsize_int (*pbitsize));
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
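/* Illustrative note (not compiler code): for a reference such as

     s.a[i].b

   get_inner_reference peels the COMPONENT_REFs and the ARRAY_REF, returning
   the innermost object S, a constant bit position for the .B field within
   one array element, and a variable byte-offset tree of roughly the form
   I * sizeof (element) for the array indexing.  */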
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert (sizetype, aligned_size);
      return size_binop (MULT_EXPR, aligned_size,
			 size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert (sizetype, aligned_offset);
      return size_binop (MULT_EXPR, aligned_offset,
			 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return 1;
    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value = simplify_gen_subreg (GET_MODE (value),
				   force_reg (GET_MODE (SUBREG_REG (value)),
					      force_operand (SUBREG_REG (value),
							     NULL_RTX)),
				   GET_MODE (SUBREG_REG (value)),
				   SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
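/* Illustrative note (not compiler code): given an address-like rtx such as

     (plus (mult (reg A) (const_int 4)) (reg B))

   force_operand recursively emits the multiply and the add as real insns
   and returns a pseudo-register holding the result, which is what callers
   need on machines without such compound addressing modes.  */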
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;

      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_CODE_LENGTH (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !lang_hooks.safe_from_p (x, exp))
	return 0;
      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
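/* Editor's note: the classic client of safe_from_p is deciding whether a
   suggested TARGET may hold operand 0 of a binary expression while
   operand 1 is still unevaluated, as expand_operands below does:

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   A zero answer is always conservative; it merely forces a fresh
   temporary and can never cause wrong code.  */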
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
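/* Editor's worked example: for EXP = i * 4 + 8, highest_pow2_factor (i)
   is 1, so the MULT_EXPR yields 1 * 4 = 4, and the PLUS_EXPR yields
   MIN (4, 8) = 4.  A MEM addressed by that expression can therefore be
   marked as (at least) 4-byte aligned regardless of the value of i.  */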
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
  else
    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
  return MAX (factor, target_align);
}
/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
	/* Should be ignored.  */;
      else if (lang_hooks.expand_decl (var))
	/* OK.  */;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
	expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
	rest_of_decl_compilation (var, 0, 0);
      else
	/* No expansion needed.  */
	gcc_assert (TREE_CODE (var) == TYPE_DECL
		    || TREE_CODE (var) == CONST_DECL
		    || TREE_CODE (var) == FUNCTION_DECL
		    || TREE_CODE (var) == LABEL_DECL);
    }
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
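/* Editor's illustration: for a use such as X * X, operand_equal_p fires
   and the tree is expanded only once; *OP1 ends up as copy_rtx (*OP0).
   With flag_evaluation_order set and a side-effecting second operand,
   e.g. X + f (), the first operand is wrapped in a SAVE_EXPR so its value
   is pinned before f () can clobber it.  */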
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      || CONSTANT_CLASS_P (exp))
    return XEXP (expand_expr_constant (exp, 0, modifier), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case CONST_DECL:
      /* Recurse and make the output_constant_def clause above handle this.  */
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
				      tmode, modifier);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Assume language
	 specific tree nodes can be expanded in some interesting way.  */
      if (DECL_P (exp)
	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */
	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
	    {
	      assemble_external (exp);
	      TREE_USED (exp) = 1;
	    }

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);

      result = convert_memory_address (tmode, result);
      tmp = convert_memory_address (tmode, tmp);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = gen_rtx_PLUS (tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
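/* Editor's sketch of the decomposition above: for an address such as
   &rec.arr[i], get_inner_reference splits the reference into the base
   object rec, a variable byte OFFSET (i scaled by the element size) and a
   constant bit position BITPOS (the field offset of arr).  The address is
   then rebuilt bottom-up:

     result = address of rec                      (recursive call on INNER)
     result = result + offset                     (expand_simple_binop)
     result = result + bitpos / BITS_PER_UNIT     (plus_constant)

   with the PLUS left symbolic under EXPAND_SUM and EXPAND_INITIALIZER.  */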
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address (tmode, result);

  return result;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */

static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						REG_NOTES (insn));
	}
    }

  return ret;
}
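/* Editor's illustration: most callers reach expand_expr_real through the
   expand_expr wrapper (in this era a static inline in expr.h that passes a
   null ALT_RTL).  A typical, fully-hedged call is

     rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   i.e. no suggested target and no suggested mode; the caller must then
   cope with X being any rtx of the expression's natural mode, since
   TARGET and TMODE are only hints.  */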
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp, decl_rtl;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context, subexp0, subexp1;
  bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  if (lang_hooks.reduce_bit_field_operations
      && TREE_CODE (type) == INTEGER_TYPE
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
    {
      /* An operation in what may be a bit-field type needs the
	 result to be reduced to the precision of the bit-field type,
	 which is narrower than that of the type's mode.  */
      reduce_bit_field = true;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
    }

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == COND_EXPR
		 || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == COMPONENT_REF || code == INDIRECT_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
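/* Editor's example of the short-circuits above: for a C statement whose
   value is discarded, such as

     (void) (a[i++] + b);

   the PLUS_EXPR itself is never materialized; each operand is expanded
   with const0_rtx as the target purely for its side effects (here the
   increment), and const0_rtx is returned for the whole expression.  */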
  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }

    case SSA_NAME:
      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
				 NULL);
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
      gcc_assert (decl_rtl);

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (!context
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);
      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
		  || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (REG_P (decl_rtl)
	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
	{
	  enum machine_mode pmode;

	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
				(TREE_CODE (exp) == RESULT_DECL
				 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;
    case VECTOR_CST:
      if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
	  || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
	return const_vector_from_tree (exp);
      else
	return expand_expr (build_constructor_from_list
			    (TREE_TYPE (exp),
			     TREE_VECTOR_CST_ELTS (exp)),
			    ignore ? const0_rtx : target, tmode, modifier);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (temp, 0))
	      || flag_force_addr))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
    case SAVE_EXPR:
      {
	tree val = TREE_OPERAND (exp, 0);
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    TREE_OPERAND (exp, 0) = val;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* Try to avoid creating a temporary at all.  This is possible
	 if all of the initializer is zero.
	 FIXME: try to handle all [0..255] initializers we can handle
	 with memset.  */
      else if (TREE_STATIC (exp)
	       && !TREE_ADDRESSABLE (exp)
	       && target != 0 && mode == BLKmode
	       && all_zeros_p (exp))
	{
	  clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	  return target;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || ((modifier == EXPAND_INITIALIZER
		    || modifier == EXPAND_CONST_ADDRESS)
		   && TREE_CONSTANT (exp)))
	{
	  rtx constructor = expand_expr_constant (exp, 1, modifier);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}
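/* Editor's note on the paths above: an aggregate initialization whose
   CONSTRUCTOR is constant and entirely zero, e.g.

     struct S s = { 0, 0, 0 };

   takes the all_zeros_p fast path and becomes one clear_storage
   (memset-like) block operation; anything else either goes to a constant
   pool MEM via expand_expr_constant or is stored field by field by
   store_constructor.  */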
    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);

	if (modifier != EXPAND_WRITE)
	  {
	    tree t;

	    t = fold_read_from_constant_string (exp);
	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (code == ALIGN_INDIRECT_REF)
	  {
	    int align = TYPE_ALIGN_UNIT (type);
	    op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
	    op0 = memory_address (mode, op0);
	  }

	temp = gen_rtx_MEM (mode, op0);

	set_mem_attributes (temp, exp, 0);

	/* Resolve the misalignment now, so that we don't have to remember
	   to resolve it later.  Of course, this only works for reads.  */
	/* ??? When we get around to supporting writes, we'll have to handle
	   this in store_expr directly.  The vectorizer isn't generating
	   those yet, however.  */
	if (code == MISALIGNED_INDIRECT_REF)
	  {
	    int icode;
	    rtx reg, insn;

	    gcc_assert (modifier == EXPAND_NORMAL
			|| modifier == EXPAND_STACK_PARM);

	    /* The vectorizer should have already checked the mode.  */
	    icode = movmisalign_optab->handlers[mode].insn_code;
	    gcc_assert (icode != CODE_FOR_nothing);

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail.  */
	    reg = gen_reg_rtx (mode);

	    /* Nor can the insn generator.  */
	    insn = GEN_FCN (icode) (reg, temp);
	    emit_insn (insn);

	    return reg;
	  }

	return temp;
      }
    case TARGET_MEM_REF:
      {
	struct mem_address addr;

	get_address_description (exp, &addr);
	op0 = addr_for_mem_ref (&addr, true);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
	return temp;
      }
    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	  {
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
		  break;
		}
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
		 && targetm.binds_local_p (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    unsigned HOST_WIDE_INT ix;
		    tree field, value;

		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					      field, value)
		      if (tree_int_cst_equal (field, index))
			{
			  if (!TREE_SIDE_EFFECTS (value))
			    return expand_expr (fold (value), target, tmode,
						modifier);
			  break;
			}
		  }
		else if (TREE_CODE (init) == STRING_CST)
		  {
		    tree index1 = index;
		    tree low_bound = array_ref_low_bound (exp);
		    index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));

		    /* Optimize the special case of a zero lower bound.

		       We convert the low_bound to sizetype to avoid some
		       problems with constant folding.  (E.g. suppose the
		       lower bound is 1 and its mode is QI.  Without the
		       conversion, (ARRAY + (INDEX - (unsigned char)1))
		       becomes ((ARRAY + (-(unsigned char)1)) + INDEX),
		       which becomes (ARRAY + 255 + INDEX).  Oops!)  */

		    if (! integer_zerop (low_bound))
		      index1 = size_diffop (index1, fold_convert (sizetype,
								  low_bound));

		    if (0 > compare_tree_int (index1,
					      TREE_STRING_LENGTH (init)))
		      {
			tree type = TREE_TYPE (TREE_TYPE (init));
			enum machine_mode mode = TYPE_MODE (type);

			if (GET_MODE_CLASS (mode) == MODE_INT
			    && GET_MODE_SIZE (mode) == 1)
			  return gen_int_mode (TREE_STRING_POINTER (init)
					       [TREE_INT_CST_LOW (index1)],
					       mode);
		      }
		  }
	      }
	  }
      }
      goto normal_inner_ref;
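/* Editor's worked example: with static const char s[] = "foo" and a
   constant subscript, the STRING_CST branch folds s[2] at expansion time:
   index1 becomes 2, which is less than TREE_STRING_LENGTH ("foo") == 4
   (the trailing NUL is counted), the element mode is a 1-byte MODE_INT,
   so gen_int_mode returns (const_int 111), the code of 'o'.  */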
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
				    idx, field, value)
	    if (field == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (field))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		  {
		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_cst (NULL_TREE,
					   GET_MODE_BITSIZE (imode) - bitsize);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
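/* Editor's note on the shift pair above: a signed bitfield value
   extracted from a CONSTRUCTOR is sign-extended by shifting left and then
   arithmetically right by GET_MODE_BITSIZE (imode) - bitsize.  E.g. a
   3-bit field holding the bits 101 in SImode is shifted left by 29 and
   back right by 29, producing -3; an unsigned field is instead masked
   with (1 << bitsize) - 1.  */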
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep, true);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to be able to hold it.  This occurs in unchecked conversion
	   in Ada.  */
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a legitimate
	   constant, OFFSET is 0, and we won't try to extract outside the
	   register (in case we were passed a partially uninitialized object
	   or a view_conversion to a larger size).  Force the constant to
	   memory otherwise.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0
		&& bitpos + bitsize <= GET_MODE_BITSIZE (mode))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	/* Otherwise, if this object not in memory and we either have an
	   offset, a BLKmode result, or a reference outside the object, put it
	   there.  Such cases can occur in Ada if we have unchecked conversion
	   of an expression from a scalar type to an array or record type or
	   for an ARRAY_RANGE_REF whose type is BLKmode.  */
	else if (!MEM_P (op0)
		 && (offset != 0
		     || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
	  {
	    tree nt = build_qualified_type (TREE_TYPE (tem),
					    (TYPE_QUALS (TREE_TYPE (tem))
					     | TYPE_QUAL_CONST));
	    rtx memloc = assign_temp (nt, 1, 1, 1);

	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    if (GET_MODE (op0) == BLKmode
		/* A constant address in OP0 can have VOIDmode, we must
		   not try to call force_reg in that case.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one element arrays having the same mode as its element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    gcc_assert (bitpos == 0
			&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
	    return op0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  It's tempting to make
	       this a constant type, since we know it's only being stored once,
	       but that can cause problems if we are taking the address of this
	       COMPONENT_REF because the MEM of any reference via that address
	       will have flags corresponding to the type, which will not
	       necessarily be constant.  */
	    if (mode == BLKmode)
	      {
		rtx new
		  = assign_stack_temp_for_type
		    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return lang_hooks.expand_expr (exp, original_target,
					   tmode, modifier,
					   alt_rtl);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM);

	  else
	    {
	      gcc_assert (REG_P (target));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE
						    (TREE_OPERAND (exp, 0)))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			   type, 0);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
				       subreg_lowpart_offset (mode,
							      inner_mode));
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE
					      (TREE_OPERAND (exp, 0))));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (TYPE_MODE (type))
		  == GET_MODE_SIZE (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  op0 = gen_lowpart (TYPE_MODE (type), op0);
	}
      /* If both modes are integral, then we can convert from one to the
	 other.  */
      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
	       && SCALAR_INT_MODE_P (TYPE_MODE (type)))
	op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;
    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants; here we handle the difference of two such
	 constants, for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
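/* Editor's illustration: the rewrite above canonicalizes subtraction of a
   constant, so A - 4 is expanded as A + (-4):

     op1 = negate_rtx (mode, GEN_INT (4));    yields (const_int -4)

   which lets plus_constant and the address-arithmetic simplifiers work
   with a single canonical PLUS form instead of a mixed PLUS/MINUS
   nest.  */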
7920 /* If first operand is constant, swap them.
7921 Thus the following special case checks need only
7922 check the second operand. */
7923 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7925 tree t1
= TREE_OPERAND (exp
, 0);
7926 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7927 TREE_OPERAND (exp
, 1) = t1
;
7930 /* Attempt to return something suitable for generating an
7931 indexed address, for machines that support that. */
7933 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7934 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7936 tree exp1
= TREE_OPERAND (exp
, 1);
7938 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7942 op0
= force_operand (op0
, NULL_RTX
);
7944 op0
= copy_to_mode_reg (mode
, op0
);
7946 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
7947 gen_int_mode (tree_low_cst (exp1
, 0),
7948 TYPE_MODE (TREE_TYPE (exp1
)))));
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */

      subexp0 = TREE_OPERAND (exp, 0);
      subexp1 = TREE_OPERAND (exp, 1);

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (subexp0) == NOP_EXPR
	  && TREE_CODE (subexp1) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
	  this_optab = usmul_widen_optab;
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code
		  != CODE_FOR_nothing)
		{
		  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
		    expand_operands (TREE_OPERAND (subexp0, 0),
				     TREE_OPERAND (subexp1, 0),
				     NULL_RTX, &op0, &op1, 0);
		  else
		    expand_operands (TREE_OPERAND (subexp0, 0),
				     TREE_OPERAND (subexp1, 0),
				     NULL_RTX, &op1, &op0, 0);

		  goto binop3;
		}
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	       && TREE_CODE (type) == INTEGER_TYPE
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
		   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		    && int_fits_type_p (TREE_OPERAND (exp, 1),
					TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
		    /* Don't use a widening multiply if a shift will do.  */
		    && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
			 > HOST_BITS_PER_WIDE_INT)
			|| exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
		   ||
		   (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
		    && (TYPE_PRECISION (TREE_TYPE
					(TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
			== TYPE_PRECISION (TREE_TYPE
					   (TREE_OPERAND
					    (TREE_OPERAND (exp, 0), 0))))
		    /* If both operands are extended, they must either both
		       be zero-extended or both be sign-extended.  */
		    && (TYPE_UNSIGNED (TREE_TYPE
				       (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
			== TYPE_UNSIGNED (TREE_TYPE
					  (TREE_OPERAND
					   (TREE_OPERAND (exp, 0), 0)))))))
	{
	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code
		  != CODE_FOR_nothing)
		{
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  goto binop3;
		}
	      else if (other_optab->handlers[(int) mode].insn_code
		       != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (TREE_OPERAND (exp, 1)),
					 unsignedp);
		  else
		    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
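      /* Typical payoff of the widening checks above: on a 32-bit target,
	 "(long long) a * (long long) b" with int operands a and b becomes
	 a single mulsidi3-style widening multiply (or, failing that, a
	 word multiply whose high part is fixed up with
	 expand_mult_highpart_adjust) instead of a full 64x64 multiply.  */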
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      gcc_unreachable ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
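
	/* Net effect of the canonicalization: e.g. signed MAX (a, 1) is
	   tested as "a > 0" and signed MIN (a, -1) as "a < 0", both of
	   which most targets handle more cheaply than comparisons
	   against 1 or -1.  */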
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    /* ??? Same problem as in expmed.c: emit_conditional_move
	       forces a stack adjustment via compare_from_rtx, and we
	       lose the stack adjustment if the sequence we are about
	       to create is discarded.  */
	    do_pending_stack_adjust ();

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
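      /* For instance, "(a != 0) & (b != 0)" and TRUTH_AND_EXPR both
	 evaluate a and b unconditionally and then and the two 0/1 flags,
	 whereas TRUTH_ANDIF_EXPR ("a && b") must skip b entirely when a
	 is false.  */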
    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */

      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;

    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;
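      /* Shape of the code emitted above for "x = p ? a : b":

	   if (!p) goto L0;   temp = a;   goto L1;
	 L0:                  temp = b;
	 L1:

	 with the returned temp then used in place of the COND_EXPR.  */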
    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;

    case MODIFY_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs);

	return const0_rtx;
      }
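      /* E.g. for one-bit fields the optimization above emits

	   s.a |= s.b   as   if (!s.b) goto L;  s.a = 1;  L:
	   s.a &= s.b   as   if ( s.b) goto L;  s.a = 0;  L:

	 branching around the store instead of extracting, combining and
	 reinserting the bits.  */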
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      op1 = expand_normal (TREE_OPERAND (exp, 1));

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, true);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case DOT_PROD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (exp, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	this_optab = optab_for_tree_code (code, type);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (exp, target);
	return target;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
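
/* Source-level sketch of the two branches above, for a value held in
   32 bits with a precision of 3 bits:

     unsigned case:  x & 7                 (mask to the low PREC bits)
     signed case:    (x << 29) >> 29       (the arithmetic right shift
					    sign-extends from bit 2)   */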
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
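
/* The pattern recognized above is the one front ends emit for offsets
   of the form "(- (uintptr_t) &exp) & (ALIGN - 1)" with ALIGN a power
   of 2: adding that offset to &exp rounds the address up to the next
   ALIGN boundary, so the result is known to be ALIGN-aligned.  */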
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* If they are read-only, non-volatile and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
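
/* Example use: for the argument "buf + 2" where
     static const char buf[16] = "hello";
   this returns the STRING_CST "hello" with *PTR_OFFSET set to 2,
   letting callers such as the builtin string expanders fold the
   access at compile time.  */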
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
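
/* The fallback sequence for "r = (a < b)" has the shape

     r = 1;  if (a < b) goto L;  r = 0;  L:

   (with the two constants swapped when the result must be inverted).  */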
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
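/* E.g. a signed "x < 1" becomes "x <= 0" and a signed "x >= 1" becomes
   "x > 0"; for unsigned operands "x >= 1" is handled as GTU against 0
   (equivalent to "x != 0").  Comparisons against zero map directly onto
   most targets' store-flag patterns.  */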
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
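
  /* E.g. "(x & 8) != 0" expands as "(x >> 3) & 1", and "(x & 8) == 0"
     as "((x >> 3) & 1) ^ 1", avoiding a store-flag instruction
     altogether.  */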
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
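
  /* Worked example: for "switch (x)" with cases 5..10, the caller has
     computed index = x - 5 and range = 5.  If x < 5 the subtraction
     wraps to a huge unsigned value, so the single unsigned test
     "index > 5" catches both x < 5 and x > 10 in one branch.  */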
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"