1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
76 #define STACK_PUSH_CODE PRE_INC
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
89 /* This structure is used by move_by_pieces to describe the move to
100 int explicit_inc_from
;
101 unsigned HOST_WIDE_INT len
;
102 HOST_WIDE_INT offset
;
106 /* This structure is used by store_by_pieces to describe the clear to
109 struct store_by_pieces
115 unsigned HOST_WIDE_INT len
;
116 HOST_WIDE_INT offset
;
117 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
);
122 static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT
,
125 static void move_by_pieces_1 (rtx (*) (rtx
, ...), enum machine_mode
,
126 struct move_by_pieces
*);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx
, rtx
, rtx
, unsigned);
129 static rtx
emit_block_move_via_libcall (rtx
, rtx
, rtx
, bool);
130 static tree
emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
132 static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT
, enum machine_mode
);
133 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces
*, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx
, ...), enum machine_mode
,
136 struct store_by_pieces
*);
137 static rtx
clear_storage_via_libcall (rtx
, rtx
, bool);
138 static tree
clear_storage_libcall_fn (int);
139 static rtx
compress_float_constant (rtx
, rtx
);
140 static rtx
get_subtarget (rtx
);
141 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
142 HOST_WIDE_INT
, enum machine_mode
,
143 tree
, tree
, int, int);
144 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
);
145 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
, enum machine_mode
,
148 static unsigned HOST_WIDE_INT
highest_pow2_factor (tree
);
149 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree
, tree
);
151 static int is_aligning_offset (tree
, tree
);
152 static void expand_operands (tree
, tree
, rtx
, rtx
*, rtx
*,
153 enum expand_modifier
);
154 static rtx
reduce_to_bit_field_precision (rtx
, rtx
, tree
);
155 static rtx
do_store_flag (tree
, rtx
, enum machine_mode
, int);
157 static void emit_single_push_insn (enum machine_mode
, rtx
, tree
);
159 static void do_tablejump (rtx
, enum machine_mode
, rtx
, rtx
, rtx
);
160 static rtx
const_vector_from_tree (tree
);
161 static void write_complex_part (rtx
, rtx
, bool);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load
[NUM_MACHINE_MODES
];
168 static char direct_store
[NUM_MACHINE_MODES
];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab
[NUM_MACHINE_MODES
];
202 /* This array records the insn_code of insns to perform block sets. */
203 enum insn_code setmem_optab
[NUM_MACHINE_MODES
];
205 /* These arrays record the insn_code of two different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab
[NUM_MACHINE_MODES
];
208 enum insn_code cmpmem_optab
[NUM_MACHINE_MODES
];
210 /* Synchronization primitives. */
211 enum insn_code sync_add_optab
[NUM_MACHINE_MODES
];
212 enum insn_code sync_sub_optab
[NUM_MACHINE_MODES
];
213 enum insn_code sync_ior_optab
[NUM_MACHINE_MODES
];
214 enum insn_code sync_and_optab
[NUM_MACHINE_MODES
];
215 enum insn_code sync_xor_optab
[NUM_MACHINE_MODES
];
216 enum insn_code sync_nand_optab
[NUM_MACHINE_MODES
];
217 enum insn_code sync_old_add_optab
[NUM_MACHINE_MODES
];
218 enum insn_code sync_old_sub_optab
[NUM_MACHINE_MODES
];
219 enum insn_code sync_old_ior_optab
[NUM_MACHINE_MODES
];
220 enum insn_code sync_old_and_optab
[NUM_MACHINE_MODES
];
221 enum insn_code sync_old_xor_optab
[NUM_MACHINE_MODES
];
222 enum insn_code sync_old_nand_optab
[NUM_MACHINE_MODES
];
223 enum insn_code sync_new_add_optab
[NUM_MACHINE_MODES
];
224 enum insn_code sync_new_sub_optab
[NUM_MACHINE_MODES
];
225 enum insn_code sync_new_ior_optab
[NUM_MACHINE_MODES
];
226 enum insn_code sync_new_and_optab
[NUM_MACHINE_MODES
];
227 enum insn_code sync_new_xor_optab
[NUM_MACHINE_MODES
];
228 enum insn_code sync_new_nand_optab
[NUM_MACHINE_MODES
];
229 enum insn_code sync_compare_and_swap
[NUM_MACHINE_MODES
];
230 enum insn_code sync_compare_and_swap_cc
[NUM_MACHINE_MODES
];
231 enum insn_code sync_lock_test_and_set
[NUM_MACHINE_MODES
];
232 enum insn_code sync_lock_release
[NUM_MACHINE_MODES
];
234 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
236 #ifndef SLOW_UNALIGNED_ACCESS
237 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
244 init_expr_once (void)
247 enum machine_mode mode
;
252 /* Try indexing by frame ptr and try by stack ptr.
253 It is known that on the Convex the stack ptr isn't a valid index.
254 With luck, one or the other is valid on any machine. */
255 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
256 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
258 /* A scratch register we can modify in-place below to avoid
259 useless RTL allocations. */
260 reg
= gen_rtx_REG (VOIDmode
, -1);
262 insn
= rtx_alloc (INSN
);
263 pat
= gen_rtx_SET (0, NULL_RTX
, NULL_RTX
);
264 PATTERN (insn
) = pat
;
266 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
267 mode
= (enum machine_mode
) ((int) mode
+ 1))
271 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
272 PUT_MODE (mem
, mode
);
273 PUT_MODE (mem1
, mode
);
274 PUT_MODE (reg
, mode
);
276 /* See if there is some register that can be used in this mode and
277 directly loaded or stored from memory. */
279 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
280 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
281 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
284 if (! HARD_REGNO_MODE_OK (regno
, mode
))
290 SET_DEST (pat
) = reg
;
291 if (recog (pat
, insn
, &num_clobbers
) >= 0)
292 direct_load
[(int) mode
] = 1;
294 SET_SRC (pat
) = mem1
;
295 SET_DEST (pat
) = reg
;
296 if (recog (pat
, insn
, &num_clobbers
) >= 0)
297 direct_load
[(int) mode
] = 1;
300 SET_DEST (pat
) = mem
;
301 if (recog (pat
, insn
, &num_clobbers
) >= 0)
302 direct_store
[(int) mode
] = 1;
305 SET_DEST (pat
) = mem1
;
306 if (recog (pat
, insn
, &num_clobbers
) >= 0)
307 direct_store
[(int) mode
] = 1;
311 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
313 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
314 mode
= GET_MODE_WIDER_MODE (mode
))
316 enum machine_mode srcmode
;
317 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
318 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
322 ic
= can_extend_p (mode
, srcmode
, 0);
323 if (ic
== CODE_FOR_nothing
)
326 PUT_MODE (mem
, srcmode
);
328 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
329 float_extend_from_mem
[mode
][srcmode
] = true;
334 /* This is run at the start of compiling a function. */
339 cfun
->expr
= ggc_alloc_cleared (sizeof (struct expr_status
));
342 /* Copy data from FROM to TO, where the machine modes are not the same.
343 Both modes may be integer, or both may be floating.
344 UNSIGNEDP should be nonzero if FROM is an unsigned type.
345 This causes zero-extension instead of sign-extension. */
348 convert_move (rtx to
, rtx from
, int unsignedp
)
350 enum machine_mode to_mode
= GET_MODE (to
);
351 enum machine_mode from_mode
= GET_MODE (from
);
352 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
353 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
357 /* rtx code for making an equivalent value. */
358 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
359 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
362 gcc_assert (to_real
== from_real
);
364 /* If the source and destination are already the same, then there's
369 /* If FROM is a SUBREG that indicates that we have already done at least
370 the required extension, strip it. We don't handle such SUBREGs as
373 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
374 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
375 >= GET_MODE_SIZE (to_mode
))
376 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
377 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
379 gcc_assert (GET_CODE (to
) != SUBREG
|| !SUBREG_PROMOTED_VAR_P (to
));
381 if (to_mode
== from_mode
382 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
384 emit_move_insn (to
, from
);
388 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
390 gcc_assert (GET_MODE_BITSIZE (from_mode
) == GET_MODE_BITSIZE (to_mode
));
392 if (VECTOR_MODE_P (to_mode
))
393 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
395 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
397 emit_move_insn (to
, from
);
401 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
403 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
404 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
413 gcc_assert (GET_MODE_PRECISION (from_mode
)
414 != GET_MODE_PRECISION (to_mode
));
416 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
421 /* Try converting directly if the insn is supported. */
423 code
= tab
->handlers
[to_mode
][from_mode
].insn_code
;
424 if (code
!= CODE_FOR_nothing
)
426 emit_unop_insn (code
, to
, from
,
427 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
431 /* Otherwise use a libcall. */
432 libcall
= tab
->handlers
[to_mode
][from_mode
].libfunc
;
434 /* Is this conversion implemented yet? */
435 gcc_assert (libcall
);
438 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
440 insns
= get_insns ();
442 emit_libcall_block (insns
, to
, value
,
443 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
445 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
449 /* Handle pointer conversion. */ /* SPEE 900220. */
450 /* Targets are expected to provide conversion insns between PxImode and
451 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
452 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
454 enum machine_mode full_mode
455 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
457 gcc_assert (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
458 != CODE_FOR_nothing
);
460 if (full_mode
!= from_mode
)
461 from
= convert_to_mode (full_mode
, from
, unsignedp
);
462 emit_unop_insn (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
,
466 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
469 enum machine_mode full_mode
470 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
472 gcc_assert (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
473 != CODE_FOR_nothing
);
475 if (to_mode
== full_mode
)
477 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
482 new_from
= gen_reg_rtx (full_mode
);
483 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
484 new_from
, from
, UNKNOWN
);
486 /* else proceed to integer conversions below. */
487 from_mode
= full_mode
;
491 /* Now both modes are integers. */
493 /* Handle expanding beyond a word. */
494 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
495 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
502 enum machine_mode lowpart_mode
;
503 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
505 /* Try converting directly if the insn is supported. */
506 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
509 /* If FROM is a SUBREG, put it into a register. Do this
510 so that we always generate the same set of insns for
511 better cse'ing; if an intermediate assignment occurred,
512 we won't be doing the operation directly on the SUBREG. */
513 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
514 from
= force_reg (from_mode
, from
);
515 emit_unop_insn (code
, to
, from
, equiv_code
);
518 /* Next, try converting via full word. */
519 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
520 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
521 != CODE_FOR_nothing
))
525 if (reg_overlap_mentioned_p (to
, from
))
526 from
= force_reg (from_mode
, from
);
527 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
529 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
530 emit_unop_insn (code
, to
,
531 gen_lowpart (word_mode
, to
), equiv_code
);
535 /* No special multiword conversion insn; do it by hand. */
538 /* Since we will turn this into a no conflict block, we must ensure
539 that the source does not overlap the target. */
541 if (reg_overlap_mentioned_p (to
, from
))
542 from
= force_reg (from_mode
, from
);
544 /* Get a copy of FROM widened to a word, if necessary. */
545 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
546 lowpart_mode
= word_mode
;
548 lowpart_mode
= from_mode
;
550 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
552 lowpart
= gen_lowpart (lowpart_mode
, to
);
553 emit_move_insn (lowpart
, lowfrom
);
555 /* Compute the value to put in each remaining word. */
557 fill_value
= const0_rtx
;
562 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
563 && STORE_FLAG_VALUE
== -1)
565 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
567 fill_value
= gen_reg_rtx (word_mode
);
568 emit_insn (gen_slt (fill_value
));
574 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
575 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
577 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
581 /* Fill the remaining words. */
582 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
584 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
585 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
587 gcc_assert (subword
);
589 if (fill_value
!= subword
)
590 emit_move_insn (subword
, fill_value
);
593 insns
= get_insns ();
596 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
597 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
601 /* Truncating multi-word to a word or less. */
602 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
603 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
606 && ! MEM_VOLATILE_P (from
)
607 && direct_load
[(int) to_mode
]
608 && ! mode_dependent_address_p (XEXP (from
, 0)))
610 || GET_CODE (from
) == SUBREG
))
611 from
= force_reg (from_mode
, from
);
612 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
616 /* Now follow all the conversions between integers
617 no more than a word long. */
619 /* For truncation, usually we can just refer to FROM in a narrower mode. */
620 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
621 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
622 GET_MODE_BITSIZE (from_mode
)))
625 && ! MEM_VOLATILE_P (from
)
626 && direct_load
[(int) to_mode
]
627 && ! mode_dependent_address_p (XEXP (from
, 0)))
629 || GET_CODE (from
) == SUBREG
))
630 from
= force_reg (from_mode
, from
);
631 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
632 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
633 from
= copy_to_reg (from
);
634 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
638 /* Handle extension. */
639 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
641 /* Convert directly if that works. */
642 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
646 from
= force_not_mem (from
);
648 emit_unop_insn (code
, to
, from
, equiv_code
);
653 enum machine_mode intermediate
;
657 /* Search for a mode to convert via. */
658 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
659 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
660 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
662 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
663 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
664 GET_MODE_BITSIZE (intermediate
))))
665 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
666 != CODE_FOR_nothing
))
668 convert_move (to
, convert_to_mode (intermediate
, from
,
669 unsignedp
), unsignedp
);
673 /* No suitable intermediate mode.
674 Generate what we need with shifts. */
675 shift_amount
= build_int_cst (NULL_TREE
,
676 GET_MODE_BITSIZE (to_mode
)
677 - GET_MODE_BITSIZE (from_mode
));
678 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
679 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
681 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
684 emit_move_insn (to
, tmp
);
689 /* Support special truncate insns for certain modes. */
690 if (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
!= CODE_FOR_nothing
)
692 emit_unop_insn (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
,
697 /* Handle truncation of volatile memrefs, and so on;
698 the things that couldn't be truncated directly,
699 and for which there was no special instruction.
701 ??? Code above formerly short-circuited this, for most integer
702 mode pairs, with a force_reg in from_mode followed by a recursive
703 call to this routine. Appears always to have been wrong. */
704 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
706 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
707 emit_move_insn (to
, temp
);
711 /* Mode combination is not recognized. */
715 /* Return an rtx for a value that would result
716 from converting X to mode MODE.
717 Both X and MODE may be floating, or both integer.
718 UNSIGNEDP is nonzero if X is an unsigned value.
719 This can be done by referring to a part of X in place
720 or by copying to a new temporary with conversion. */
723 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
725 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
728 /* Return an rtx for a value that would result
729 from converting X from mode OLDMODE to mode MODE.
730 Both modes may be floating, or both integer.
731 UNSIGNEDP is nonzero if X is an unsigned value.
733 This can be done by referring to a part of X in place
734 or by copying to a new temporary with conversion.
736 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
739 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
743 /* If FROM is a SUBREG that indicates that we have already done at least
744 the required extension, strip it. */
746 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
747 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
748 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
749 x
= gen_lowpart (mode
, x
);
751 if (GET_MODE (x
) != VOIDmode
)
752 oldmode
= GET_MODE (x
);
757 /* There is one case that we must handle specially: If we are converting
758 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
759 we are to interpret the constant as unsigned, gen_lowpart will do
760 the wrong if the constant appears negative. What we want to do is
761 make the high-order word of the constant zero, not all ones. */
763 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
764 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
765 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
767 HOST_WIDE_INT val
= INTVAL (x
);
769 if (oldmode
!= VOIDmode
770 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
772 int width
= GET_MODE_BITSIZE (oldmode
);
774 /* We need to zero extend VAL. */
775 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
778 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
781 /* We can do this with a gen_lowpart if both desired and current modes
782 are integer, and this is either a constant integer, a register, or a
783 non-volatile MEM. Except for the constant case where MODE is no
784 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
786 if ((GET_CODE (x
) == CONST_INT
787 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
788 || (GET_MODE_CLASS (mode
) == MODE_INT
789 && GET_MODE_CLASS (oldmode
) == MODE_INT
790 && (GET_CODE (x
) == CONST_DOUBLE
791 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
792 && ((MEM_P (x
) && ! MEM_VOLATILE_P (x
)
793 && direct_load
[(int) mode
])
795 && (! HARD_REGISTER_P (x
)
796 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
797 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
798 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
800 /* ?? If we don't know OLDMODE, we have to assume here that
801 X does not need sign- or zero-extension. This may not be
802 the case, but it's the best we can do. */
803 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
804 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
806 HOST_WIDE_INT val
= INTVAL (x
);
807 int width
= GET_MODE_BITSIZE (oldmode
);
809 /* We must sign or zero-extend in this case. Start by
810 zero-extending, then sign extend if we need to. */
811 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
813 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
814 val
|= (HOST_WIDE_INT
) (-1) << width
;
816 return gen_int_mode (val
, mode
);
819 return gen_lowpart (mode
, x
);
822 /* Converting from integer constant into mode is always equivalent to an
824 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
826 gcc_assert (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (oldmode
));
827 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
830 temp
= gen_reg_rtx (mode
);
831 convert_move (temp
, x
, unsignedp
);
835 /* STORE_MAX_PIECES is the number of bytes at a time that we can
836 store efficiently. Due to internal GCC limitations, this is
837 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
838 for an immediate constant. */
840 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
842 /* Determine whether the LEN bytes can be moved by using several move
843 instructions. Return nonzero if a call to move_by_pieces should
847 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
848 unsigned int align ATTRIBUTE_UNUSED
)
850 return MOVE_BY_PIECES_P (len
, align
);
853 /* Generate several move instructions to copy LEN bytes from block FROM to
854 block TO. (These are MEM rtx's with BLKmode).
856 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
857 used to push FROM to the stack.
859 ALIGN is maximum stack alignment we can assume.
861 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
862 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
866 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
867 unsigned int align
, int endp
)
869 struct move_by_pieces data
;
870 rtx to_addr
, from_addr
= XEXP (from
, 0);
871 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
872 enum machine_mode mode
= VOIDmode
, tmode
;
873 enum insn_code icode
;
875 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
878 data
.from_addr
= from_addr
;
881 to_addr
= XEXP (to
, 0);
884 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
885 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
887 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
894 #ifdef STACK_GROWS_DOWNWARD
900 data
.to_addr
= to_addr
;
903 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
904 || GET_CODE (from_addr
) == POST_INC
905 || GET_CODE (from_addr
) == POST_DEC
);
907 data
.explicit_inc_from
= 0;
908 data
.explicit_inc_to
= 0;
909 if (data
.reverse
) data
.offset
= len
;
912 /* If copying requires more than two move insns,
913 copy addresses to registers (to make displacements shorter)
914 and use post-increment if available. */
915 if (!(data
.autinc_from
&& data
.autinc_to
)
916 && move_by_pieces_ninsns (len
, align
, max_size
) > 2)
918 /* Find the mode of the largest move... */
919 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
920 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
921 if (GET_MODE_SIZE (tmode
) < max_size
)
924 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
926 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
927 data
.autinc_from
= 1;
928 data
.explicit_inc_from
= -1;
930 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
932 data
.from_addr
= copy_addr_to_reg (from_addr
);
933 data
.autinc_from
= 1;
934 data
.explicit_inc_from
= 1;
936 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
937 data
.from_addr
= copy_addr_to_reg (from_addr
);
938 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
940 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
942 data
.explicit_inc_to
= -1;
944 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
946 data
.to_addr
= copy_addr_to_reg (to_addr
);
948 data
.explicit_inc_to
= 1;
950 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
951 data
.to_addr
= copy_addr_to_reg (to_addr
);
954 tmode
= mode_for_size (MOVE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
955 if (align
>= GET_MODE_ALIGNMENT (tmode
))
956 align
= GET_MODE_ALIGNMENT (tmode
);
959 enum machine_mode xmode
;
961 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
963 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
964 if (GET_MODE_SIZE (tmode
) > MOVE_MAX_PIECES
965 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
968 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
971 /* First move what we can in the largest integer mode, then go to
972 successively smaller modes. */
976 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
977 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
978 if (GET_MODE_SIZE (tmode
) < max_size
)
981 if (mode
== VOIDmode
)
984 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
985 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
986 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
988 max_size
= GET_MODE_SIZE (mode
);
991 /* The code above should have handled everything. */
992 gcc_assert (!data
.len
);
998 gcc_assert (!data
.reverse
);
1003 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
1004 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
1006 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
1009 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1016 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
1024 /* Return number of insns required to move L bytes by pieces.
1025 ALIGN (in bits) is maximum alignment we can assume. */
1027 static unsigned HOST_WIDE_INT
1028 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
,
1029 unsigned int max_size
)
1031 unsigned HOST_WIDE_INT n_insns
= 0;
1032 enum machine_mode tmode
;
1034 tmode
= mode_for_size (MOVE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
1035 if (align
>= GET_MODE_ALIGNMENT (tmode
))
1036 align
= GET_MODE_ALIGNMENT (tmode
);
1039 enum machine_mode tmode
, xmode
;
1041 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
1043 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
1044 if (GET_MODE_SIZE (tmode
) > MOVE_MAX_PIECES
1045 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
1048 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
1051 while (max_size
> 1)
1053 enum machine_mode mode
= VOIDmode
;
1054 enum insn_code icode
;
1056 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1057 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1058 if (GET_MODE_SIZE (tmode
) < max_size
)
1061 if (mode
== VOIDmode
)
1064 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1065 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1066 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1068 max_size
= GET_MODE_SIZE (mode
);
1075 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1076 with move instructions for mode MODE. GENFUN is the gen_... function
1077 to make a move insn for that mode. DATA has all the other info. */
1080 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1081 struct move_by_pieces
*data
)
1083 unsigned int size
= GET_MODE_SIZE (mode
);
1084 rtx to1
= NULL_RTX
, from1
;
1086 while (data
->len
>= size
)
1089 data
->offset
-= size
;
1093 if (data
->autinc_to
)
1094 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1097 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1100 if (data
->autinc_from
)
1101 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1104 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1106 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1107 emit_insn (gen_add2_insn (data
->to_addr
,
1108 GEN_INT (-(HOST_WIDE_INT
)size
)));
1109 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1110 emit_insn (gen_add2_insn (data
->from_addr
,
1111 GEN_INT (-(HOST_WIDE_INT
)size
)));
1114 emit_insn ((*genfun
) (to1
, from1
));
1117 #ifdef PUSH_ROUNDING
1118 emit_single_push_insn (mode
, from1
, NULL
);
1124 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1125 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1126 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1127 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1129 if (! data
->reverse
)
1130 data
->offset
+= size
;
1136 /* Emit code to move a block Y to a block X. This may be done with
1137 string-move instructions, with multiple scalar move instructions,
1138 or with a library call.
1140 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1141 SIZE is an rtx that says how long they are.
1142 ALIGN is the maximum alignment we can assume they have.
1143 METHOD describes what kind of copy this is, and what mechanisms may be used.
1145 Return the address of the new block, if memcpy is called and returns it,
1149 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1157 case BLOCK_OP_NORMAL
:
1158 case BLOCK_OP_TAILCALL
:
1159 may_use_call
= true;
1162 case BLOCK_OP_CALL_PARM
:
1163 may_use_call
= block_move_libcall_safe_for_call_parm ();
1165 /* Make inhibit_defer_pop nonzero around the library call
1166 to force it to pop the arguments right away. */
1170 case BLOCK_OP_NO_LIBCALL
:
1171 may_use_call
= false;
1178 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1180 gcc_assert (MEM_P (x
));
1181 gcc_assert (MEM_P (y
));
1184 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1185 block copy is more efficient for other large modes, e.g. DCmode. */
1186 x
= adjust_address (x
, BLKmode
, 0);
1187 y
= adjust_address (y
, BLKmode
, 0);
1189 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1190 can be incorrect is coming from __builtin_memcpy. */
1191 if (GET_CODE (size
) == CONST_INT
)
1193 if (INTVAL (size
) == 0)
1196 x
= shallow_copy_rtx (x
);
1197 y
= shallow_copy_rtx (y
);
1198 set_mem_size (x
, size
);
1199 set_mem_size (y
, size
);
1202 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1203 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1204 else if (emit_block_move_via_movmem (x
, y
, size
, align
))
1206 else if (may_use_call
)
1207 retval
= emit_block_move_via_libcall (x
, y
, size
,
1208 method
== BLOCK_OP_TAILCALL
);
1210 emit_block_move_via_loop (x
, y
, size
, align
);
1212 if (method
== BLOCK_OP_CALL_PARM
)
1218 /* A subroutine of emit_block_move. Returns true if calling the
1219 block move libcall will not clobber any parameters which may have
1220 already been placed on the stack. */
1223 block_move_libcall_safe_for_call_parm (void)
1225 /* If arguments are pushed on the stack, then they're safe. */
1229 /* If registers go on the stack anyway, any argument is sure to clobber
1230 an outgoing argument. */
1231 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1233 tree fn
= emit_block_move_libcall_fn (false);
1235 if (REG_PARM_STACK_SPACE (fn
) != 0)
1240 /* If any argument goes in memory, then it might clobber an outgoing
1243 CUMULATIVE_ARGS args_so_far
;
1246 fn
= emit_block_move_libcall_fn (false);
1247 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1249 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1250 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1252 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1253 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1254 if (!tmp
|| !REG_P (tmp
))
1256 if (targetm
.calls
.arg_partial_bytes (&args_so_far
, mode
, NULL
, 1))
1258 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1264 /* A subroutine of emit_block_move. Expand a movmem pattern;
1265 return true if successful. */
1268 emit_block_move_via_movmem (rtx x
, rtx y
, rtx size
, unsigned int align
)
1270 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1271 int save_volatile_ok
= volatile_ok
;
1272 enum machine_mode mode
;
1274 /* Since this is a move insn, we don't care about volatility. */
1277 /* Try the most limited insn first, because there's no point
1278 including more than one in the machine description unless
1279 the more limited one has some advantage. */
1281 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1282 mode
= GET_MODE_WIDER_MODE (mode
))
1284 enum insn_code code
= movmem_optab
[(int) mode
];
1285 insn_operand_predicate_fn pred
;
1287 if (code
!= CODE_FOR_nothing
1288 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1289 here because if SIZE is less than the mode mask, as it is
1290 returned by the macro, it will definitely be less than the
1291 actual mode mask. */
1292 && ((GET_CODE (size
) == CONST_INT
1293 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1294 <= (GET_MODE_MASK (mode
) >> 1)))
1295 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1296 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1297 || (*pred
) (x
, BLKmode
))
1298 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1299 || (*pred
) (y
, BLKmode
))
1300 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1301 || (*pred
) (opalign
, VOIDmode
)))
1304 rtx last
= get_last_insn ();
1307 op2
= convert_to_mode (mode
, size
, 1);
1308 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1309 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1310 op2
= copy_to_mode_reg (mode
, op2
);
1312 /* ??? When called via emit_block_move_for_call, it'd be
1313 nice if there were some way to inform the backend, so
1314 that it doesn't fail the expansion because it thinks
1315 emitting the libcall would be more efficient. */
1317 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1321 volatile_ok
= save_volatile_ok
;
1325 delete_insns_since (last
);
1329 volatile_ok
= save_volatile_ok
;
1333 /* A subroutine of emit_block_move. Expand a call to memcpy.
1334 Return the return value from memcpy, 0 otherwise. */
1337 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
, bool tailcall
)
1339 rtx dst_addr
, src_addr
;
1340 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1341 enum machine_mode size_mode
;
1344 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1345 pseudos. We can then place those new pseudos into a VAR_DECL and
1348 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1349 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1351 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1352 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1354 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1355 src_tree
= make_tree (ptr_type_node
, src_addr
);
1357 size_mode
= TYPE_MODE (sizetype
);
1359 size
= convert_to_mode (size_mode
, size
, 1);
1360 size
= copy_to_mode_reg (size_mode
, size
);
1362 /* It is incorrect to use the libcall calling conventions to call
1363 memcpy in this context. This could be a user call to memcpy and
1364 the user may wish to examine the return value from memcpy. For
1365 targets where libcalls and normal calls have different conventions
1366 for returning pointers, we could end up generating incorrect code. */
1368 size_tree
= make_tree (sizetype
, size
);
1370 fn
= emit_block_move_libcall_fn (true);
1371 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1372 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1373 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1375 /* Now we have to build up the CALL_EXPR itself. */
1376 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1377 call_expr
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1378 call_expr
, arg_list
, NULL_TREE
);
1379 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
1381 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1386 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1387 for the function we use for block copies. The first time FOR_CALL
1388 is true, we call assemble_external. */
1390 static GTY(()) tree block_move_fn
;
1393 init_block_move_fn (const char *asmspec
)
1399 fn
= get_identifier ("memcpy");
1400 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1401 const_ptr_type_node
, sizetype
,
1404 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
1405 DECL_EXTERNAL (fn
) = 1;
1406 TREE_PUBLIC (fn
) = 1;
1407 DECL_ARTIFICIAL (fn
) = 1;
1408 TREE_NOTHROW (fn
) = 1;
1414 set_user_assembler_name (block_move_fn
, asmspec
);
1418 emit_block_move_libcall_fn (int for_call
)
1420 static bool emitted_extern
;
1423 init_block_move_fn (NULL
);
1425 if (for_call
&& !emitted_extern
)
1427 emitted_extern
= true;
1428 make_decl_rtl (block_move_fn
);
1429 assemble_external (block_move_fn
);
1432 return block_move_fn
;
1435 /* A subroutine of emit_block_move. Copy the data via an explicit
1436 loop. This is used only when libcalls are forbidden. */
1437 /* ??? It'd be nice to copy in hunks larger than QImode. */
1440 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1441 unsigned int align ATTRIBUTE_UNUSED
)
1443 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1444 enum machine_mode iter_mode
;
1446 iter_mode
= GET_MODE (size
);
1447 if (iter_mode
== VOIDmode
)
1448 iter_mode
= word_mode
;
1450 top_label
= gen_label_rtx ();
1451 cmp_label
= gen_label_rtx ();
1452 iter
= gen_reg_rtx (iter_mode
);
1454 emit_move_insn (iter
, const0_rtx
);
1456 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1457 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1458 do_pending_stack_adjust ();
1460 emit_jump (cmp_label
);
1461 emit_label (top_label
);
1463 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
1464 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
1465 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
1466 x
= change_address (x
, QImode
, x_addr
);
1467 y
= change_address (y
, QImode
, y_addr
);
1469 emit_move_insn (x
, y
);
1471 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1472 true, OPTAB_LIB_WIDEN
);
1474 emit_move_insn (iter
, tmp
);
1476 emit_label (cmp_label
);
1478 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1482 /* Copy all or part of a value X into registers starting at REGNO.
1483 The number of registers to be filled is NREGS. */
1486 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1489 #ifdef HAVE_load_multiple
1497 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1498 x
= validize_mem (force_const_mem (mode
, x
));
1500 /* See if the machine can do this with a load multiple insn. */
1501 #ifdef HAVE_load_multiple
1502 if (HAVE_load_multiple
)
1504 last
= get_last_insn ();
1505 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1513 delete_insns_since (last
);
1517 for (i
= 0; i
< nregs
; i
++)
1518 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1519 operand_subword_force (x
, i
, mode
));
1522 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1523 The number of registers to be filled is NREGS. */
1526 move_block_from_reg (int regno
, rtx x
, int nregs
)
1533 /* See if the machine can do this with a store multiple insn. */
1534 #ifdef HAVE_store_multiple
1535 if (HAVE_store_multiple
)
1537 rtx last
= get_last_insn ();
1538 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1546 delete_insns_since (last
);
1550 for (i
= 0; i
< nregs
; i
++)
1552 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1556 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1560 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1561 ORIG, where ORIG is a non-consecutive group of registers represented by
1562 a PARALLEL. The clone is identical to the original except in that the
1563 original set of registers is replaced by a new set of pseudo registers.
1564 The new set has the same modes as the original set. */
1567 gen_group_rtx (rtx orig
)
1572 gcc_assert (GET_CODE (orig
) == PARALLEL
);
1574 length
= XVECLEN (orig
, 0);
1575 tmps
= alloca (sizeof (rtx
) * length
);
1577 /* Skip a NULL entry in first slot. */
1578 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1583 for (; i
< length
; i
++)
1585 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1586 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1588 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1591 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1594 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1595 except that values are placed in TMPS[i], and must later be moved
1596 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1599 emit_group_load_1 (rtx
*tmps
, rtx dst
, rtx orig_src
, tree type
, int ssize
)
1603 enum machine_mode m
= GET_MODE (orig_src
);
1605 gcc_assert (GET_CODE (dst
) == PARALLEL
);
1608 && !SCALAR_INT_MODE_P (m
)
1609 && !MEM_P (orig_src
)
1610 && GET_CODE (orig_src
) != CONCAT
)
1612 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_src
));
1613 if (imode
== BLKmode
)
1614 src
= assign_stack_temp (GET_MODE (orig_src
), ssize
, 0);
1616 src
= gen_reg_rtx (imode
);
1617 if (imode
!= BLKmode
)
1618 src
= gen_lowpart (GET_MODE (orig_src
), src
);
1619 emit_move_insn (src
, orig_src
);
1620 /* ...and back again. */
1621 if (imode
!= BLKmode
)
1622 src
= gen_lowpart (imode
, src
);
1623 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1627 /* Check for a NULL entry, used to indicate that the parameter goes
1628 both on the stack and in registers. */
1629 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1634 /* Process the pieces. */
1635 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1637 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1638 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1639 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1642 /* Handle trailing fragments that run over the size of the struct. */
1643 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1645 /* Arrange to shift the fragment to where it belongs.
1646 extract_bit_field loads to the lsb of the reg. */
1648 #ifdef BLOCK_REG_PADDING
1649 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1650 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1655 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1656 bytelen
= ssize
- bytepos
;
1657 gcc_assert (bytelen
> 0);
1660 /* If we won't be loading directly from memory, protect the real source
1661 from strange tricks we might play; but make sure that the source can
1662 be loaded directly into the destination. */
1664 if (!MEM_P (orig_src
)
1665 && (!CONSTANT_P (orig_src
)
1666 || (GET_MODE (orig_src
) != mode
1667 && GET_MODE (orig_src
) != VOIDmode
)))
1669 if (GET_MODE (orig_src
) == VOIDmode
)
1670 src
= gen_reg_rtx (mode
);
1672 src
= gen_reg_rtx (GET_MODE (orig_src
));
1674 emit_move_insn (src
, orig_src
);
1677 /* Optimize the access just a bit. */
1679 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1680 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1681 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1682 && bytelen
== GET_MODE_SIZE (mode
))
1684 tmps
[i
] = gen_reg_rtx (mode
);
1685 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1687 else if (COMPLEX_MODE_P (mode
)
1688 && GET_MODE (src
) == mode
1689 && bytelen
== GET_MODE_SIZE (mode
))
1690 /* Let emit_move_complex do the bulk of the work. */
1692 else if (GET_CODE (src
) == CONCAT
)
1694 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1695 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1697 if ((bytepos
== 0 && bytelen
== slen0
)
1698 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1700 /* The following assumes that the concatenated objects all
1701 have the same size. In this case, a simple calculation
1702 can be used to determine the object and the bit field
1704 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1705 if (! CONSTANT_P (tmps
[i
])
1706 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
1707 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1708 (bytepos
% slen0
) * BITS_PER_UNIT
,
1709 1, NULL_RTX
, mode
, mode
);
1715 gcc_assert (!bytepos
);
1716 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1717 emit_move_insn (mem
, src
);
1718 tmps
[i
] = extract_bit_field (mem
, bytelen
* BITS_PER_UNIT
,
1719 0, 1, NULL_RTX
, mode
, mode
);
1722 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1723 SIMD register, which is currently broken. While we get GCC
1724 to emit proper RTL for these cases, let's dump to memory. */
1725 else if (VECTOR_MODE_P (GET_MODE (dst
))
1728 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1731 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1732 emit_move_insn (mem
, src
);
1733 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1735 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1736 && XVECLEN (dst
, 0) > 1)
1737 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE(dst
), bytepos
);
1738 else if (CONSTANT_P (src
)
1739 || (REG_P (src
) && GET_MODE (src
) == mode
))
1742 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1743 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1747 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
1748 build_int_cst (NULL_TREE
, shift
), tmps
[i
], 0);
1752 /* Emit code to move a block SRC of type TYPE to a block DST,
1753 where DST is non-consecutive registers represented by a PARALLEL.
1754 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1758 emit_group_load (rtx dst
, rtx src
, tree type
, int ssize
)
1763 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1764 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1766 /* Copy the extracted pieces into the proper (probable) hard regs. */
1767 for (i
= 0; i
< XVECLEN (dst
, 0); i
++)
1769 rtx d
= XEXP (XVECEXP (dst
, 0, i
), 0);
1772 emit_move_insn (d
, tmps
[i
]);
1776 /* Similar, but load SRC into new pseudos in a format that looks like
1777 PARALLEL. This can later be fed to emit_group_move to get things
1778 in the right place. */
1781 emit_group_load_into_temps (rtx parallel
, rtx src
, tree type
, int ssize
)
1786 vec
= rtvec_alloc (XVECLEN (parallel
, 0));
1787 emit_group_load_1 (&RTVEC_ELT (vec
, 0), parallel
, src
, type
, ssize
);
1789 /* Convert the vector to look just like the original PARALLEL, except
1790 with the computed values. */
1791 for (i
= 0; i
< XVECLEN (parallel
, 0); i
++)
1793 rtx e
= XVECEXP (parallel
, 0, i
);
1794 rtx d
= XEXP (e
, 0);
1798 d
= force_reg (GET_MODE (d
), RTVEC_ELT (vec
, i
));
1799 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), d
, XEXP (e
, 1));
1801 RTVEC_ELT (vec
, i
) = e
;
1804 return gen_rtx_PARALLEL (GET_MODE (parallel
), vec
);
1807 /* Emit code to move a block SRC to block DST, where SRC and DST are
1808 non-consecutive groups of registers, each represented by a PARALLEL. */
1811 emit_group_move (rtx dst
, rtx src
)
1815 gcc_assert (GET_CODE (src
) == PARALLEL
1816 && GET_CODE (dst
) == PARALLEL
1817 && XVECLEN (src
, 0) == XVECLEN (dst
, 0));
1819 /* Skip first entry if NULL. */
1820 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1821 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1822 XEXP (XVECEXP (src
, 0, i
), 0));
1825 /* Move a group of registers represented by a PARALLEL into pseudos. */
1828 emit_group_move_into_temps (rtx src
)
1830 rtvec vec
= rtvec_alloc (XVECLEN (src
, 0));
1833 for (i
= 0; i
< XVECLEN (src
, 0); i
++)
1835 rtx e
= XVECEXP (src
, 0, i
);
1836 rtx d
= XEXP (e
, 0);
1839 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), copy_to_reg (d
), XEXP (e
, 1));
1840 RTVEC_ELT (vec
, i
) = e
;
1843 return gen_rtx_PARALLEL (GET_MODE (src
), vec
);
1846 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1847 where SRC is non-consecutive registers represented by a PARALLEL.
1848 SSIZE represents the total size of block ORIG_DST, or -1 if not
1852 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1856 enum machine_mode m
= GET_MODE (orig_dst
);
1858 gcc_assert (GET_CODE (src
) == PARALLEL
);
1860 if (!SCALAR_INT_MODE_P (m
)
1861 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
1863 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_dst
));
1864 if (imode
== BLKmode
)
1865 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
, 0);
1867 dst
= gen_reg_rtx (imode
);
1868 emit_group_store (dst
, src
, type
, ssize
);
1869 if (imode
!= BLKmode
)
1870 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
1871 emit_move_insn (orig_dst
, dst
);
1875 /* Check for a NULL entry, used to indicate that the parameter goes
1876 both on the stack and in registers. */
1877 if (XEXP (XVECEXP (src
, 0, 0), 0))
1882 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
1884 /* Copy the (probable) hard regs into pseudos. */
1885 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1887 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1888 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1889 emit_move_insn (tmps
[i
], reg
);
1892 /* If we won't be storing directly into memory, protect the real destination
1893 from strange tricks we might play. */
1895 if (GET_CODE (dst
) == PARALLEL
)
1899 /* We can get a PARALLEL dst if there is a conditional expression in
1900 a return statement. In that case, the dst and src are the same,
1901 so no action is necessary. */
1902 if (rtx_equal_p (dst
, src
))
1905 /* It is unclear if we can ever reach here, but we may as well handle
1906 it. Allocate a temporary, and split this into a store/load to/from
1909 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
1910 emit_group_store (temp
, src
, type
, ssize
);
1911 emit_group_load (dst
, temp
, type
, ssize
);
1914 else if (!MEM_P (dst
) && GET_CODE (dst
) != CONCAT
)
1916 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
1917 /* Make life a bit easier for combine. */
1918 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
1921 /* Process the pieces. */
1922 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1924 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
1925 enum machine_mode mode
= GET_MODE (tmps
[i
]);
1926 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1929 /* Handle trailing fragments that run over the size of the struct. */
1930 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1932 /* store_bit_field always takes its value from the lsb.
1933 Move the fragment to the lsb if it's not already there. */
1935 #ifdef BLOCK_REG_PADDING
1936 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
1937 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1943 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1944 tmps
[i
] = expand_shift (RSHIFT_EXPR
, mode
, tmps
[i
],
1945 build_int_cst (NULL_TREE
, shift
),
1948 bytelen
= ssize
- bytepos
;
1951 if (GET_CODE (dst
) == CONCAT
)
1953 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
1954 dest
= XEXP (dst
, 0);
1955 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
1957 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
1958 dest
= XEXP (dst
, 1);
1962 gcc_assert (bytepos
== 0 && XVECLEN (src
, 0));
1963 dest
= assign_stack_temp (GET_MODE (dest
),
1964 GET_MODE_SIZE (GET_MODE (dest
)), 0);
1965 emit_move_insn (adjust_address (dest
, GET_MODE (tmps
[i
]), bytepos
),
1972 /* Optimize the access just a bit. */
1974 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
1975 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
1976 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1977 && bytelen
== GET_MODE_SIZE (mode
))
1978 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
1980 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
1984 /* Copy from the pseudo into the (probable) hard reg. */
1985 if (orig_dst
!= dst
)
1986 emit_move_insn (orig_dst
, dst
);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
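
/* Worked example (illustrative): for a 6-byte struct returned in the
   least significant end of the registers on a 32-bit big-endian target,
   bytes % UNITS_PER_WORD == 2, so

     padding_correction = BITS_PER_WORD - 2 * BITS_PER_UNIT = 32 - 16 = 16

   and the loop above reads source bits starting 16 bits into the first
   word while writing destination bits starting at offset 0.  */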
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
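
/* For illustration: after the calls above, *CALL_FUSAGE is a chain like

     (expr_list (use (reg 4))
        (expr_list (use (reg 5))
           <previous contents>))

   (register numbers hypothetical) which the call emitter attaches to the
   CALL_INSN as CALL_INSN_FUNCTION_USAGE, so dataflow knows those hard
   registers are read by the call.  */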
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
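
#if 0
/* Illustrative sketch of a CONSTFUN callback (hypothetical, compiled out):
   it hands back LEN bytes of a host string one MODE-sized chunk at a time,
   the way the constant-string expanders in builtins.c feed store_by_pieces
   internally (c_readstr is static in builtins.c; shown here purely for
   illustration).  */
static rtx
example_read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
  const char *str = (const char *) data;
  return c_readstr (str + offset, mode);
}
/* A caller would then test
     can_store_by_pieces (len, example_read_str, (void *) str, align)
   before committing to the inline expansion.  */
#endif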
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
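
/* Example (illustrative): clearing 7 bytes at 4-byte alignment on a
   32-bit target decomposes widest-mode-first:

     (set (mem:SI addr)     (const_int 0))   ; bytes 0-3
     (set (mem:HI addr+4)   (const_int 0))   ; bytes 4-5
     (set (mem:QI addr+6)   (const_int 0))   ; byte 6

   clear_by_pieces_1 supplies the const0_rtx for every chunk.  */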
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
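
/* For illustration: with explicit_inc_to == 1 (post-increment chosen by
   store_by_pieces_1 but no auto-inc address in the MEM itself), each
   iteration emits the store followed by an explicit bump of the address
   register:

     (set (mem:SI (reg A)) (const ...))
     (set (reg A) (plus (reg A) (const_int 4)))

   whereas with a true POST_INC address the bump is folded into the MEM.  */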
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (GET_CODE (size) == CONST_INT
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align))
    ;
  else
    return clear_storage_via_libcall (object, size,
                                      method == BLOCK_OP_TAILCALL);

  return NULL;
}
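
/* Usage sketch (illustrative): zeroing a 32-byte BLKmode MEM is just

     clear_storage (object, GEN_INT (32), BLOCK_OP_NORMAL);

   the helper picks clear_by_pieces, a setmem pattern, or the memset
   libcall fallback on its own.  */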
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those new pseudos into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  size_tree = make_tree (sizetype, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
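
/* The declaration built above corresponds to the C prototype

     void *memset (void *, int, size_t);

   with DECL_EXTERNAL and TREE_PUBLIC set so it is emitted as an ordinary
   external call rather than through the libcall machinery.  */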
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = setmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx opsize, opchar;
          rtx last = get_last_insn ();
          rtx pat;

          opsize = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (opsize, mode))
            opsize = copy_to_mode_reg (mode, opsize);

          opchar = convert_to_mode (mode, val, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (opchar, mode))
            opchar = copy_to_mode_reg (mode, opchar);

          pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
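
/* For reference (as used above): a setmemM pattern's operands are
   0 = destination BLKmode MEM, 1 = length, 2 = fill value, 3 = known
   alignment in bytes.  Each operand is validated against the insn's
   predicate and copied into a register of the probed integer mode when
   the predicate rejects the raw rtx.  */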
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
      /* For MEMs we always try to make a "subreg", that is to adjust
         the MEM, because store_bit_field may generate overly
         convoluted RTL for sub-word fields.  */
      || MEM_P (cplx))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
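
/* Example (illustrative): writing the imaginary half of an SCmode pseudo
   takes the subreg branch above and produces

     (set (subreg:SF (reg:SC P) 4) (reg:SF V))

   since GET_MODE_SIZE (SFmode) == 4 is the byte offset of the imaginary
   part; only the sub-word MEM case falls through to store_bit_field.  */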
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
      /* For MEMs we always try to make a "subreg", that is to adjust
         the MEM, because extract_bit_field may generate overly
         convoluted RTL for sub-word fields.  */
      || MEM_P (cplx))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode);
}
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
                       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (reload_in_progress && MEM_P (x))
    {
      /* We can't use gen_lowpart here because it may call change_address
         which is not appropriate if we were called when a reload was in
         progress.  We don't have to worry about changing the address since
         the size in bytes is supposed to be the same.  Copy the MEM to
         change the mode and move any substitutions from the old MEM to
         the new one.  */

      ret = adjust_address_nv (x, new_mode, 0);
      copy_replacements (x, ret);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = mov_optab->handlers[imode].insn_code;
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, false);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, false);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

static rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              GEN_INT (adjust), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
      temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
      break;
    case POST_DEC:
      temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
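
/* Worked example (illustrative): for (mem:SI (pre_dec (reg sp))) on a
   32-bit target, adjust is rounded to 4 and negated for PRE_DEC, the
   stack pointer is decremented by an explicit add, and the returned MEM
   is simply (mem:SI (reg sp)), the freshly allocated slot.  */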
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

static rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y);
      if (ret)
        return ret;
    }

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);
  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = mov_optab->handlers[CCmode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y);
  gcc_assert (ret != NULL);
  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  emit_insn (seq);

  return last_insn;
}
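
/* Example (illustrative): moving a DImode value on a 32-bit target with
   no movdi pattern becomes two word moves

     (set (subreg:SI (reg:DI X) 0) (subreg:SI (reg:DI Y) 0))
     (set (subreg:SI (reg:DI X) 4) (subreg:SI (reg:DI Y) 4))

   emitted as a sequence, preceded by a CLOBBER of X when any destination
   part is a SUBREG so that dataflow sees X's old value die first.  */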
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = mov_optab->handlers[mode].insn_code;
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y);
      if (ret)
        return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (MEM_P (y)
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
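
/* Example (illustrative): a DFmode move of 1.5, which truncates to SFmode
   exactly, can on many FP targets be emitted as something like

     (set (reg:DF D) (float_extend:DF (mem:SF <constant pool entry>)))

   trading a double-width constant load for a narrower pool entry plus an
   extension the target already performs cheaply.  */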
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    GEN_INT (rounded_size),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
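
/* Example (illustrative): with STACK_PUSH_CODE == PRE_DEC and no padding,
   the fast path above produces the classic push form

     (set (mem:SI (pre_dec:SI (reg sp))) (reg:SI X))

   while the downward-padding path decrements sp explicitly and stores at
   sp plus the padding offset.  */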
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, int extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (dest, align);

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        {
          gcc_assert (partial % UNITS_PER_WORD == 0);
          move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
        }
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
          || x == 0
          /* Only registers can be subtargets.  */
          || !REG_P (x)
          /* Don't use hard regs to avoid extending their life.  */
          || REGNO (x) < FIRST_PSEUDO_REGISTER
          ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
                                 unsigned HOST_WIDE_INT bitpos,
                                 enum machine_mode mode1, rtx str_rtx,
                                 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (!BINARY_CLASS_P (src)
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);
  STRIP_NOPS (op0);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
        str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
                                MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
        return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (TREE_CODE (src))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
         where we don't need to do any masking and also
         1 bit bitfields where xor can be used.
         We might win by one instruction for the other bitfields
         too if insv/extv instructions aren't used, so that
         can be added later.  */
      if (bitpos + bitsize != str_bitsize
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
        break;

      value = expand_expr (op1, NULL_RTX, str_mode, 0);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
        {
          value = expand_and (str_mode, value, const1_rtx, NULL);
          binop = xor_optab;
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
      value = convert_modes (GET_MODE (str_rtx),
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
        {
          rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
                              - 1);
          value = expand_and (GET_MODE (str_rtx), value, mask,
                              NULL_RTX);
        }
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
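
/* Example (illustrative): given `struct { unsigned f : 1; } s;  s.f ^= 1;'
   the BIT_XOR_EXPR arm above masks the constant to the field width, shifts
   it into place, and emits a single xor on the containing word instead of
   the extract/modify/insert sequence store_field would generate.  */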
3851 /* Expand an assignment that stores the value of FROM into TO. */
3854 expand_assignment (tree to
, tree from
)
3859 /* Don't crash if the lhs of the assignment was erroneous. */
3861 if (TREE_CODE (to
) == ERROR_MARK
)
3863 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3867 /* Assignment of a structure component needs special treatment
3868 if the structure component's rtx is not simply a MEM.
3869 Assignment of an array element at a constant index, and assignment of
3870 an array element in an unaligned packed structure field, has the same
3872 if (handled_component_p (to
)
3873 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
3875 enum machine_mode mode1
;
3876 HOST_WIDE_INT bitsize
, bitpos
;
3883 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3884 &unsignedp
, &volatilep
, true);
3886 /* If we are going to use store_bit_field and extract_bit_field,
3887 make sure to_rtx will be safe for multiple use. */
3889 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3893 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3895 gcc_assert (MEM_P (to_rtx
));
3897 #ifdef POINTERS_EXTEND_UNSIGNED
3898 if (GET_MODE (offset_rtx
) != Pmode
)
3899 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
3901 if (GET_MODE (offset_rtx
) != ptr_mode
)
3902 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3905 /* A constant address in TO_RTX can have VOIDmode, we must not try
3906 to call force_reg for that case. Avoid that case. */
3908 && GET_MODE (to_rtx
) == BLKmode
3909 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3911 && (bitpos
% bitsize
) == 0
3912 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3913 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3915 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3919 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3920 highest_pow2_factor_for_target (to
,
3924 /* Handle expand_expr of a complex value returning a CONCAT. */
3925 if (GET_CODE (to_rtx
) == CONCAT
)
3927 if (TREE_CODE (TREE_TYPE (from
)) == COMPLEX_TYPE
)
3929 gcc_assert (bitpos
== 0);
3930 result
= store_expr (from
, to_rtx
, false);
3934 gcc_assert (bitpos
== 0 || bitpos
== GET_MODE_BITSIZE (mode1
));
3935 result
= store_expr (from
, XEXP (to_rtx
, bitpos
!= 0), false);
3942 /* If the field is at offset zero, we could have been given the
3943 DECL_RTX of the parent struct. Don't munge it. */
3944 to_rtx
= shallow_copy_rtx (to_rtx
);
3946 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
3948 /* Deal with volatile and readonly fields. The former is only
3949 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3951 MEM_VOLATILE_P (to_rtx
) = 1;
3952 if (component_uses_parent_alias_set (to
))
3953 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3956 if (optimize_bitfield_assignment_op (bitsize
, bitpos
, mode1
,
3960 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3961 TREE_TYPE (tem
), get_alias_set (to
));
3965 preserve_temp_slots (result
);
3971 /* If the rhs is a function call and its value is not an aggregate,
3972 call the function before we start to compute the lhs.
3973 This is needed for correct code for cases such as
3974 val = setjmp (buf) on machines where reference to val
3975 requires loading up part of an address in a separate insn.
3977 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3978 since it might be a promoted variable where the zero- or sign- extension
3979 needs to be done. Handling this in the normal way is safe because no
3980 computation is done before the call. */
3981 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
3982 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3983 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3984 && REG_P (DECL_RTL (to
))))
3989 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3991 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3993 /* Handle calls that return values in multiple non-contiguous locations.
3994 The Irix 6 ABI has examples of this. */
3995 if (GET_CODE (to_rtx
) == PARALLEL
)
3996 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
3997 int_size_in_bytes (TREE_TYPE (from
)));
3998 else if (GET_MODE (to_rtx
) == BLKmode
)
3999 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
4002 if (POINTER_TYPE_P (TREE_TYPE (to
)))
4003 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
4004 emit_move_insn (to_rtx
, value
);
4006 preserve_temp_slots (to_rtx
);
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TYPE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }
  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more rigorous?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */

rtx
store_expr (tree exp, rtx target, int call_param_p)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  int dont_return_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
      emit_jump_insn (gen_jump (lab2));
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
      emit_label (lab2);

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    {
      /* If this is a scalar in a register that is stored in a wider mode
	 than the declared mode, compute the result into its declared mode
	 and then convert to the wider mode.  Our value is the computed
	 expression.  */

      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && (!lang_hooks.reduce_bit_field_operations
	      || (GET_MODE_PRECISION (GET_MODE (target))
		  == TYPE_PRECISION (TREE_TYPE (exp)))))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      (lang_hooks.types.signed_or_unsigned_type
	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

	  exp = convert (lang_hooks.types.type_for_mode
			 (GET_MODE (SUBREG_REG (target)),
			  SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);

      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && REG_P (target)
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && CONSTANT_P (temp))
	dont_return_target = 1;
    }
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, TREE_TYPE (exp),
			 int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
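
/* Illustrative sketch (not GCC code): the effect of the STRING_CST path
   in store_expr above, reduced to plain C.  The helper name and types
   are invented for the example.  Only the string's actual length is
   copied (the emit_block_move step), and the remainder of the
   destination is cleared (the clear_storage step).  */

static void
example_store_string (char *target, unsigned long target_size,
		      const char *str, unsigned long str_len)
{
  unsigned long i;
  unsigned long copy = str_len < target_size ? str_len : target_size;

  for (i = 0; i < copy; i++)	/* emit_block_move analogue */
    target[i] = str[i];
  for (; i < target_size; i++)	/* clear_storage analogue */
    target[i] = 0;
}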
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields are set to non-constant values,
     and place it in *P_NC_ELTS; and
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * if a type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *p_must_clear.  */

static void
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_nc_elts,
			    HOST_WIDE_INT *p_elt_count,
			    bool *p_must_clear)
{
  tree list;
  HOST_WIDE_INT nz_elts, nc_elts, elt_count;

  nz_elts = 0;
  nc_elts = 0;
  elt_count = 0;
  for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
    {
      tree value = TREE_VALUE (list);
      tree purpose = TREE_PURPOSE (list);
      HOST_WIDE_INT mult = 1;

      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
	    mult = (tree_low_cst (hi_index, 1)
		    - tree_low_cst (lo_index, 1) + 1);
	}

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
	    categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
	    nz_elts += mult * nz;
	    nc_elts += mult * nc;
	    elt_count += mult * ic;
	  }
	  break;
	case INTEGER_CST:
	case REAL_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  elt_count += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case VECTOR_CST:
	  {
	    tree v;
	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
	      {
		if (!initializer_zerop (TREE_VALUE (v)))
		  nz_elts += mult;
		elt_count += mult;
	      }
	  }
	  break;

	default:
	  nz_elts += mult;
	  elt_count += mult;
	  if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
	    nc_elts += mult;
	  break;
	}
    }
  if (!*p_must_clear
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
    {
      bool clear_this = true;

      list = CONSTRUCTOR_ELTS (ctor);
      if (list)
	{
	  tree init_sub_type;

	  /* We don't expect more than one element of the union to be
	     initialized.  Not sure what we should do otherwise...  */
	  gcc_assert (TREE_CHAIN (list) == NULL);

	  init_sub_type = TREE_TYPE (TREE_VALUE (list));

	  /* ??? We could look at each element of the union, and find the
	     largest element, which would avoid comparing the size of the
	     initialized element against any tail padding in the union.
	     Doesn't seem worth the effort...  */
	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
				TYPE_SIZE (init_sub_type)) == 1)
	    {
	      /* And now we have to find out if the element itself is fully
		 constructed.  E.g. for union { struct { int a, b; } s; } u
		 = { .s = { .a = 1 } }.  */
	      if (elt_count == count_type_elements (init_sub_type))
		clear_this = false;
	    }
	}

      *p_must_clear = clear_this;
    }
  *p_nz_elts += nz_elts;
  *p_nc_elts += nc_elts;
  *p_elt_count += elt_count;
}

void
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_nc_elts,
			  HOST_WIDE_INT *p_elt_count,
			  bool *p_must_clear)
{
  *p_nz_elts = 0;
  *p_nc_elts = 0;
  *p_elt_count = 0;
  *p_must_clear = false;
  categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
			      p_must_clear);
}
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized types.  */

HOST_WIDE_INT
count_type_elements (tree type)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree telts = array_type_nelts (type);
	if (telts && host_integerp (telts, 1))
	  {
	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
	    if (m < 0)
	      return -1;
	    else if (max / n > m)
	      return n * m;
	  }
	return -1;
      }

    case RECORD_TYPE:
      {
	HOST_WIDE_INT n = 0, t;
	tree f;

	for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      t = count_type_elements (TREE_TYPE (f));
	      if (t < 0)
		return -1;
	      n += t;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	/* Ho hum.  How in the world do we guess here?  Clearly it isn't
	   right to count the fields.  Guess based on the number of words.  */
	HOST_WIDE_INT n = int_size_in_bytes (type);
	if (n < 0)
	  return -1;
	return n / UNITS_PER_WORD;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
      if (must_clear)
	return 1;

      elts = count_type_elements (TREE_TYPE (exp));

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}
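
/* Illustrative sketch (not GCC code): the threshold test used by
   mostly_zeros_p above.  An aggregate counts as "mostly zeros" when
   fewer than a quarter of its scalar fields are nonzero, i.e. at
   least 3/4 of them are zero.  The function name is invented.  */

static int
example_mostly_zeros (long nz_elts, long total_elts)
{
  return nz_elts < total_elts / 4;
}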
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree elt;

	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && ((list_length (CONSTRUCTOR_ELTS (exp))
		      != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (! cleared)
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	  {
	    tree field = TREE_PURPOSE (elt);
	    tree value = TREE_VALUE (elt);
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (host_integerp (DECL_SIZE (field), 1))
	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
	    else
	      bitsize = -1;

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (host_integerp (offset, 0)
		&& host_integerp (bit_position (field), 0))
	      {
		bitpos = int_bit_position (field);
		offset = 0;
	      }
	    else
	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	    if (offset)
	      {
		rtx offset_rtx;

		offset
		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						    make_tree (TREE_TYPE (exp),
							       target));
		offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
		gcc_assert (MEM_P (to_rtx));

#ifdef POINTERS_EXTEND_UNSIGNED
		if (GET_MODE (offset_rtx) != Pmode)
		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
		if (GET_MODE (offset_rtx) != ptr_mode)
		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

		to_rtx = offset_address (to_rtx, offset_rtx,
					 highest_pow2_factor (offset));
	      }

#ifdef WORD_REGISTER_OPERATIONS
	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_size
		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
		    value = convert (type, value);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (NULL_TREE,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }
#endif

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
				     value, type, cleared,
				     get_alias_set (TREE_TYPE (field)));
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree elt;
	int i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
			  && host_integerp (TYPE_MAX_VALUE (domain), 0));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    for (elt = CONSTRUCTOR_ELTS (exp);
		 elt != NULL_TREE && ! need_to_clear;
		 elt = TREE_CHAIN (elt))
	      {
		tree index = TREE_PURPOSE (elt);
		HOST_WIDE_INT this_node_count;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! host_integerp (lo_index, 1)
			|| ! host_integerp (hi_index, 1))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_low_cst (hi_index, 1)
				       - tree_low_cst (lo_index, 1) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (TREE_VALUE (elt)))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
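
	/* Illustrative sketch (not GCC code): the clearing decision made
	   above for arrays, and again below for vectors, reduced to plain
	   integers.  Clearing the whole object first wins when elements
	   are missing (count < n_elts) or when at least 75% of the
	   explicit elements are themselves zero, since those zero stores
	   can then be skipped.  The function name is invented.

	   static int
	   example_need_to_clear (long count, long n_elts, long zero_count)
	   {
	     return count < n_elts || 4 * zero_count >= 3 * count;
	   }  */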
	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i++)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    int unsignedp;
	    tree value = TREE_VALUE (elt);
	    tree index = TREE_PURPOSE (elt);
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    unsignedp = TYPE_UNSIGNED (elttype);
	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && host_integerp (lo_index, 0)
		    && host_integerp (hi_index, 0)
		    && (lo = tree_low_cst (lo_index, 0),
			hi = tree_low_cst (hi_index, 0),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (host_integerp (TYPE_SIZE (elttype), 1)
			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, type, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx loop_start = gen_label_rtx ();
		    rtx loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		    unsignedp = TYPE_UNSIGNED (domain);

		    index = build_decl (VAR_DECL, NULL_TREE, domain);

		    index_r
		      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						   &unsignedp, 0));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position
		      = convert (ssizetype,
				 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
					      index, TYPE_MIN_VALUE (domain)));
		    position = size_binop (MULT_EXPR, position,
					   convert (ssizetype,
						    TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node));

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! host_integerp (index, 0))
		     || ! host_integerp (TYPE_SIZE (elttype), 1))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position = size_binop (MULT_EXPR, index,
				       convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_expr (position, 0,
						       VOIDmode, 0),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_low_cst (index, 0) - minelt)
			    * tree_low_cst (TYPE_SIZE (elttype), 1));
		else
		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 type, cleared,
					 get_alias_set (elttype));
	      }
	  }
	break;
      }
    case VECTOR_TYPE:
      {
	tree elt;
	int i;
	int need_to_clear;
	int icode = 0;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) vec_init_optab->handlers[mode].insn_code;
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;

	    for (elt = CONSTRUCTOR_ELTS (exp);
		 elt != NULL_TREE;
		 elt = TREE_CHAIN (elt))
	      {
		int n_elts_here = tree_low_cst
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
				    TYPE_SIZE (elttype), 0), 1);

		count += n_elts_here;
		if (mostly_zeros_p (TREE_VALUE (elt)))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i += bitsize / elt_size)
	  {
	    tree value = TREE_VALUE (elt);
	    tree index = TREE_PURPOSE (elt);
	    HOST_WIDE_INT eltpos;

	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (index != 0)
	      eltpos = tree_low_cst (index, 1);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* Vector CONSTRUCTORs should only be built from smaller
		   vectors in the case of BLKmode vectors.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_expr (value, NULL_RTX, VOIDmode, 0);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos,
					 value_mode, value, type,
					 cleared, get_alias_set (elttype));
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     enum machine_mode mode, tree exp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }
  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0);
    }
  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp;
      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      if (TREE_CODE (exp) == NOP_EXPR)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = TREE_OPERAND (exp, 0);
	    }
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && !(bitpos % BITS_PER_UNIT));

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree tem;
  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
				     DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   bitsize_int (*pbitsize));
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:
  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
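
/* Illustrative sketch (not GCC code): what get_inner_reference computes
   for a reference such as s.f[i], reduced to plain integers.  The
   constant displacement accumulates into BITPOS and the variable part
   into OFFSET, as in the loop above.  The function name, field position
   and element size below are made-up example values.  */

static void
example_inner_reference (long i, long *bitpos, long *offset_bytes)
{
  const long field_bit_offset = 64;	/* hypothetical: s.f starts at bit 64 */
  const long elt_size_bytes = 4;	/* hypothetical: 4-byte array elements */

  *bitpos = field_bit_offset;		/* constant part, in bits */
  *offset_bytes = i * elt_size_bytes;	/* variable part, in bytes */
}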
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert (sizetype, aligned_size);
      return size_binop (MULT_EXPR, aligned_size,
			 size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert (sizetype, aligned_offset);
      return size_binop (MULT_EXPR, aligned_offset,
			 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
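
/* Illustrative sketch (not GCC code): the scaling performed by
   component_ref_field_offset above.  The COMPONENT_REF offset operand
   is measured in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT, so the
   byte offset is recovered by multiplying.  The function name is
   invented; the literal 8 stands in for BITS_PER_UNIT.  */

static unsigned long
example_field_offset (unsigned long aligned_offset,
		      unsigned long decl_offset_align_bits)
{
  return aligned_offset * (decl_offset_align_bits / 8);
}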
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return 1;

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }
  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
		    code == ZERO_EXTEND);
      return target;
    }
  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references
     to be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;
  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;
    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;

      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;
	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}
      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_CODE_LENGTH (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !lang_hooks.safe_from_p (x, exp))
	return 0;
      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
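
/* Illustrative sketch (not GCC code): the final conflict test of
   safe_from_p in plain terms.  Two objects conflict when they are the
   same rtx, or when both live in memory and their address ranges may
   overlap.  Addresses are modeled as plain integers here, and the
   function name is invented.  */

static int
example_ranges_overlap (unsigned long x_lo, unsigned long x_hi,
			unsigned long y_lo, unsigned long y_hi)
{
  /* Half-open ranges [x_lo, x_hi) and [y_lo, y_hi) intersect.  */
  return x_lo < y_hi && y_lo < x_hi;
}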
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
        return BIGGEST_ALIGNMENT;
      else
        {
          /* Note: tree_low_cst is intentionally not used here,
             we don't care about the upper bits.  */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;
          return c0 ? c0 : BIGGEST_ALIGNMENT;
        }
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
          && host_integerp (TREE_OPERAND (exp, 1), 1))
        {
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
          return MAX (1, c0 / c1);
        }
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
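/* Illustrative example (not from the original source): for EXP
   representing (n * 24) + 8, the INTEGER_CST 24 contributes its lowest
   set bit (8), the MULT_EXPR case multiplies that by the factor of N
   (at least 1), and the PLUS_EXPR case takes MIN (8, 8) -- so the
   known power-of-two factor of the whole expression is 8.  */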
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
  else
    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
  return MAX (factor, target_align);
}
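/* Illustrative example (not from the original source): if EXP is only
   known to be a multiple of 4 but TARGET is a COMPONENT_REF whose field
   is 8-byte aligned, DECL_ALIGN_UNIT yields 8 and the MAX above makes
   the result 8.  */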
/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
        /* Should be ignored.  */;
      else if (lang_hooks.expand_decl (var))
        /* OK.  */;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
        expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
        rest_of_decl_compilation (var, 0, 0);
      else
        /* No expansion needed.  */
        gcc_assert (TREE_CODE (var) == TYPE_DECL
                    || TREE_CODE (var) == CONST_DECL
                    || TREE_CODE (var) == FUNCTION_DECL
                    || TREE_CODE (var) == LABEL_DECL);
    }
}
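/* Summary of the dispatch above (descriptive note, not from the
   original source): a decl carrying a value-expression is ignored, the
   language hook gets first refusal, local automatics go through
   expand_decl, function-local statics through rest_of_decl_compilation,
   and the remaining decl kinds need no expansion at all.  */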
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
                 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
         temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
        exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
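/* Illustrative example (not from the original source): when expanding
   "x + f ()" under flag_evaluation_order, EXP1 ("f ()") has side
   effects, so EXP0 ("x") is wrapped in a SAVE_EXPR first and cannot be
   clobbered by the call.  */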
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
                         enum expand_modifier modifier)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      || CONSTANT_CLASS_P (exp))
    return XEXP (output_constant_def (exp, 0), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);

    case CONST_DECL:
      /* Recurse and make the output_constant_def clause above handle this.  */
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
                                      tmode, modifier);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
         the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
         The expression is therefore always offset by the size of the
         scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
         expand_expr, as that can have various side effects; LABEL_DECLs for
         example, may not have their DECL_RTL set yet.  Assume language
         specific tree nodes can be expanded in some interesting way.  */
      if (DECL_P (exp)
          || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
        {
          result = expand_expr (exp, target, tmode,
                                modifier == EXPAND_INITIALIZER
                                ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

          /* If the DECL isn't in memory, then the DECL wasn't properly
             marked TREE_ADDRESSABLE, which will be either a front-end
             or a tree optimizer bug.  */
          gcc_assert (MEM_P (result));
          result = XEXP (result, 0);

          /* ??? Is this needed anymore?  */
          if (DECL_P (exp) && !TREE_USED (exp))
            {
              assemble_external (exp);
              TREE_USED (exp) = 1;
            }

          if (modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_CONST_ADDRESS)
            result = force_operand (result, target);
          return result;
        }

      /* Pass FALSE as the last argument to get_inner_reference although
         we are expanding to RTL.  The rationale is that we know how to
         handle "aligning nodes" here: we can just bypass them because
         they won't change the final object whose address will be returned
         (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
        result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);

      result = convert_memory_address (tmode, result);
      tmp = convert_memory_address (tmode, tmp);

      if (modifier == EXPAND_SUM)
        result = gen_rtx_PLUS (tmode, result, tmp);
      else
        {
          subtarget = bitpos ? NULL_RTX : target;
          result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
                                        1, OPTAB_LIB_WIDEN);
        }
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
         of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
        result = force_operand (result, target);
    }

  return result;
}
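/* Illustrative example (not from the original source): for &s.f where
   the field F sits at bit position 32, the recursion above yields the
   address of S, OFFSET is null, and plus_constant then adds
   32 / BITS_PER_UNIT = 4 bytes to form the field's address.  */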
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
                       enum expand_modifier modifier)
{
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
                                    tmode, modifier);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address (tmode, result);

  return result;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
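/* Illustrative example (not from the original source): under
   EXPAND_SUM, the address arithmetic for arr[i] with 4-byte elements
   may come back as (plus (mult (reg i) (const_int 4)) (symbol_ref arr))
   instead of being forced into a single pseudo register.  */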
static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
                               enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
                  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
        last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    {
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
    }

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;

      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
            {
              REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                  REG_NOTES (insn));
            }
        }
    }

  return ret;
}
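/* Illustrative note (not from the original source): under
   -fnon-call-exceptions, a trapping insn emitted above -- say a
   division that became a libcall -- receives a REG_EH_REGION note
   pointing at region RN so the EH machinery can see it may throw.  */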
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
                    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  if (lang_hooks.reduce_bit_field_operations
      && TREE_CODE (type) == INTEGER_TYPE
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
    {
      /* An operation in what may be a bit-field type needs the
         result to be reduced to the precision of the bit-field type,
         which is narrower than that of the type's mode.  */
      reduce_bit_field = true;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
    }

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                 || code == CONVERT_EXPR || code == COND_EXPR
                 || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (MEM_P (temp))
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == tcc_unary
          || code == COMPONENT_REF || code == INDIRECT_REF)
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
               || TREE_CODE_CLASS (code) == tcc_comparison
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if (code == BIT_FIELD_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);

        temp = label_rtx (exp);
        temp = gen_rtx_LABEL_REF (Pmode, temp);

        if (function != current_function_decl
            && function != 0)
          LABEL_REF_NONLOCAL_P (temp) = 1;

        temp = gen_rtx_MEM (FUNCTION_MODE, temp);
        return temp;
      }

    case SSA_NAME:
      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
                                 NULL);
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      gcc_assert (DECL_RTL (exp));

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
         been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (!context
                  || context == current_function_decl
                  || TREE_STATIC (exp)
                  /* ??? C++ creates functions that are not TREE_STATIC.  */
                  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */
      if (MEM_P (DECL_RTL (exp))
          && REG_P (XEXP (DECL_RTL (exp), 0)))
        temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */
      else if (MEM_P (DECL_RTL (exp))
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && !REG_P (XEXP (DECL_RTL (exp), 0)))))
        {
          if (alt_rtl)
            *alt_rtl = DECL_RTL (exp);
          temp = replace_equiv_address (DECL_RTL (exp),
                                        copy_rtx (XEXP (DECL_RTL (exp), 0)));
        }

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (REG_P (DECL_RTL (exp))
          && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
        {
          enum machine_mode pmode;

          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
                                (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
          gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);

          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
          return temp;
        }

      return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
         which can result in (plus xx (const_int 0)), which can get
         simplified by validate_replace_rtx during virtual register
         instantiation, which can result in unrecognizable insns.
         Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
          && modifier != EXPAND_INITIALIZER)
        temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
          || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
        return const_vector_from_tree (exp);
      else
        return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
                                    TREE_VECTOR_CST_ELTS (exp)),
                            ignore ? const0_rtx : target, tmode, modifier);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
        {
          enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
          rtx rtarg, itarg;

          rtarg = XEXP (original_target, 0);
          itarg = XEXP (original_target, 1);

          /* Move the real and imaginary parts separately.  */
          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

          if (op0 != rtarg)
            emit_move_insn (rtarg, op0);
          if (op1 != itarg)
            emit_move_insn (itarg, op1);

          return original_target;
        }

      /* ... fall through ...  */

    case STRING_CST:
      temp = output_constant_def (exp, 1);

      /* temp contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (temp, 0))
              || flag_force_addr))
        return replace_equiv_address (temp,
                                      copy_rtx (XEXP (temp, 0)));
      return temp;
    case SAVE_EXPR:
      {
        tree val = TREE_OPERAND (exp, 0);
        rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

        if (!SAVE_EXPR_RESOLVED_P (exp))
          {
            /* We can indeed still hit this case, typically via builtin
               expanders calling save_expr immediately before expanding
               something.  Assume this means that we only have to deal
               with non-BLKmode values.  */
            gcc_assert (GET_MODE (ret) != BLKmode);

            val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
            DECL_ARTIFICIAL (val) = 1;
            DECL_IGNORED_P (val) = 1;
            TREE_OPERAND (exp, 0) = val;
            SAVE_EXPR_RESOLVED_P (exp) = 1;

            if (!CONSTANT_P (ret))
              ret = copy_to_reg (ret);
            SET_DECL_RTL (val, ret);
          }

        return ret;
      }

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
        expand_goto (TREE_OPERAND (exp, 0));
      else
        expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.

         FIXME: Avoid trying to fill vector constructors piece-meal.
         Output them with output_constant_def below unless we're sure
         they're zeros.  This should go away when vector initializers
         are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
                        && (! MOVE_BY_PIECES_P
                            (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
                             TYPE_ALIGN (type)))
                        && ! mostly_zeros_p (exp))))
               || ((modifier == EXPAND_INITIALIZER
                    || modifier == EXPAND_CONST_ADDRESS)
                   && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp, 1);

          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM)
            constructor = validize_mem (constructor);

          return constructor;
        }
      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL
              || modifier == EXPAND_STACK_PARM)
            target
              = assign_temp (build_qualified_type (type,
                                                   (TYPE_QUALS (type)
                                                    | (TREE_READONLY (exp)
                                                       * TYPE_QUAL_CONST))),
                             0, TREE_ADDRESSABLE (exp), 1);

          store_constructor (exp, target, 0, int_expr_size (exp));
          return target;
        }
    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);

        if (modifier != EXPAND_WRITE)
          {
            tree t;

            t = fold_read_from_constant_string (exp);
            if (t)
              return expand_expr (t, target, tmode, modifier);
          }

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);

        if (code == ALIGN_INDIRECT_REF)
          {
            int align = TYPE_ALIGN_UNIT (type);
            op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
            op0 = memory_address (mode, op0);
          }

        temp = gen_rtx_MEM (mode, op0);

        set_mem_attributes (temp, exp, 0);

        /* Resolve the misalignment now, so that we don't have to remember
           to resolve it later.  Of course, this only works for reads.  */
        /* ??? When we get around to supporting writes, we'll have to handle
           this in store_expr directly.  The vectorizer isn't generating
           those yet, however.  */
        if (code == MISALIGNED_INDIRECT_REF)
          {
            int icode;
            rtx reg, insn;

            gcc_assert (modifier == EXPAND_NORMAL);

            /* The vectorizer should have already checked the mode.  */
            icode = movmisalign_optab->handlers[mode].insn_code;
            gcc_assert (icode != CODE_FOR_nothing);

            /* We've already validated the memory, and we're creating a
               new pseudo destination.  The predicates really can't fail.  */
            reg = gen_reg_rtx (mode);

            /* Nor can the insn generator.  */
            insn = GEN_FCN (icode) (reg, temp);
            emit_insn (insn);

            return reg;
          }

        return temp;
      }
    case TARGET_MEM_REF:
      {
        struct mem_address addr;

        get_address_description (exp, &addr);
        op0 = addr_for_mem_ref (&addr, true);
        op0 = memory_address (mode, op0);
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
        return temp;
      }
    case ARRAY_REF:

      {
        tree array = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY)
          {
            tree t = fold_read_from_constant_string (exp);

            if (t)
              return expand_expr (t, target, tmode, modifier);
          }

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY
            && TREE_CODE (array) == CONSTRUCTOR
            && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST)
          {
            tree elem;

            for (elem = CONSTRUCTOR_ELTS (array);
                 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
                 elem = TREE_CHAIN (elem))
              ;

            if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
              return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
                                  modifier);
          }

        else if (optimize >= 1
                 && modifier != EXPAND_CONST_ADDRESS
                 && modifier != EXPAND_INITIALIZER
                 && modifier != EXPAND_MEMORY
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
                 && targetm.binds_local_p (array))
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem;

                    for (elem = CONSTRUCTOR_ELTS (init);
                         (elem
                          && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
                         elem = TREE_CHAIN (elem))
                      ;

                    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && 0 > compare_tree_int (index,
                                                  TREE_STRING_LENGTH (init)))
                  {
                    tree type = TREE_TYPE (TREE_TYPE (init));
                    enum machine_mode mode = TYPE_MODE (type);

                    if (GET_MODE_CLASS (mode) == MODE_INT
                        && GET_MODE_SIZE (mode) == 1)
                      return gen_int_mode (TREE_STRING_POINTER (init)
                                           [TREE_INT_CST_LOW (index)], mode);
                  }
              }
          }
      }
      goto normal_inner_ref;
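/* Illustrative example (not from the original source): given
   "static const char a[] = \"hello\";", a read of a[1] at -O1 and above
   is folded by the STRING_CST branch just above straight to
   (const_int 101), the character code of 'e', in the element mode.  */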
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    && modifier == EXPAND_STACK_PARM)
                  target = 0;
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    HOST_WIDE_INT bitsize
                      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
                    enum machine_mode imode
                      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

                    if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (imode, op0, op1, target);
                      }
                    else
                      {
                        tree count
                          = build_int_cst (NULL_TREE,
                                           GET_MODE_BITSIZE (imode) - bitsize);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      goto normal_inner_ref;
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep, true);
        rtx orig_op0;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        gcc_assert (tem != exp);

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to have to do.  This occurs in unchecked conversion in Ada.  */
        orig_op0 = op0
          = expand_expr (tem,
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                              != INTEGER_CST)
                          && modifier != EXPAND_STACK_PARM
                          ? target : NULL_RTX),
                         VOIDmode,
                         (modifier == EXPAND_INITIALIZER
                          || modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_STACK_PARM)
                         ? modifier : EXPAND_NORMAL);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and OFFSET is 0 and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
                && offset == 0)
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        /* Otherwise, if this object not in memory and we either have an
           offset or a BLKmode result, put it there.  This case can't occur in
           C, but can in Ada if we have unchecked conversion of an expression
           from a scalar type to an array or record type or for an
           ARRAY_RANGE_REF whose type is BLKmode.  */
        else if (!MEM_P (op0)
                 && (offset != 0
                     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
          {
            tree nt = build_qualified_type (TREE_TYPE (tem),
                                            (TYPE_QUALS (TREE_TYPE (tem))
                                             | TYPE_QUAL_CONST));
            rtx memloc = assign_temp (nt, 1, 1, 1);

            emit_move_insn (memloc, op0);
            op0 = memloc;
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
                                          EXPAND_SUM);

            gcc_assert (MEM_P (op0));
#ifdef POINTERS_EXTEND_UNSIGNED
            if (GET_MODE (offset_rtx) != Pmode)
              offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
            if (GET_MODE (offset_rtx) != ptr_mode)
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

            if (GET_MODE (op0) == BLKmode
                /* A constant address in OP0 can have VOIDmode, we must
                   not try to call force_reg in that case.  */
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
                && bitsize != 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
              {
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }

        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
           record its alignment as BIGGEST_ALIGNMENT.  */
        if (MEM_P (op0) && bitpos == 0 && offset != 0
            && is_aligning_offset (offset, tem))
          set_mem_align (op0, BIGGEST_ALIGNMENT);

        /* Don't forget about volatility even if this is a bitfield.  */
        if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }
        /* The following code doesn't handle CONCAT.
           Assume only bitpos == 0 can be used for CONCAT, due to
           one element arrays having the same mode as its element.  */
        if (GET_CODE (op0) == CONCAT)
          {
            gcc_assert (bitpos == 0
                        && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
            return op0;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || REG_P (op0) || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
                      || (MEM_P (op0)
                          && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
                              || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
                     && ((modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_INITIALIZER)
                         ? STRICT_ALIGNMENT
                         : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
                    || (bitpos % BITS_PER_UNIT != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (bitsize >= 0
                && TYPE_SIZE (TREE_TYPE (exp))
                && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                          bitsize)))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && MEM_P (op0)
                      && MEM_P (target)
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                if (bitsize == 0)
                  return target;

                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                gcc_assert (MEM_P (op0)
                            && (!target || MEM_P (target))
                            && !(bitpos % BITS_PER_UNIT));

                emit_block_move (target,
                                 adjust_address (op0, VOIDmode,
                                                 bitpos / BITS_PER_UNIT),
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

                return target;
              }

            op0 = validize_mem (op0);

            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
                                     (modifier == EXPAND_STACK_PARM
                                      ? NULL_RTX : target),
                                     ext_mode, ext_mode);
            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);

            /* If the result type is BLKmode, store the data into a temporary
               of the appropriate type, but with the mode corresponding to the
               mode for the data we have (op0's mode).  It's tempting to make
               this a constant type, since we know it's only being stored once,
               but that can cause problems if we are taking the address of this
               COMPONENT_REF because the MEM of any reference via that address
               will have flags corresponding to the type, which will not
               necessarily be constant.  */
            if (mode == BLKmode)
              {
                rtx new
                  = assign_stack_temp_for_type
                    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                set_mem_attributes (op0, exp, 1);
              }

            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        set_mem_attributes (op0, exp, 0);
        if (REG_P (XEXP (op0, 0)))
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return lang_hooks.expand_expr (exp, original_target,
                                           tmode, modifier,
                                           alt_rtl);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attribute.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, exp, 0);
              return result;
            }

          if (target == 0)
            {
              if (TYPE_MODE (type) != BLKmode)
                target = gen_reg_rtx (TYPE_MODE (type));
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (MEM_P (target))
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        adjust_address (target, TYPE_MODE (valtype), 0),
                        modifier == EXPAND_STACK_PARM);

          else
            {
              gcc_assert (REG_P (target));

              /* Store this field into a union of the proper type.  */
              store_field (target,
                           MIN ((int_size_in_bytes (TREE_TYPE
                                                    (TREE_OPERAND (exp, 0)))
                                 * BITS_PER_UNIT),
                                (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                           0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                           type, 0);
            }

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return REDUCE_BIT_FIELD (op0);
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
        ;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
        {
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
          enum machine_mode inner_mode = TYPE_MODE (inner_type);

          if (modifier == EXPAND_INITIALIZER)
            op0 = simplify_gen_subreg (mode, op0, inner_mode,
                                       subreg_lowpart_offset (mode,
                                                              inner_mode));
          else
            op0 = convert_modes (mode, inner_mode, op0,
                                 TYPE_UNSIGNED (inner_type));
        }

      else if (modifier == EXPAND_INITIALIZER)
        op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
        op0 = convert_to_mode (mode, op0,
                               TYPE_UNSIGNED (TREE_TYPE
                                              (TREE_OPERAND (exp, 0))));
      else
        {
          convert_move (target, op0,
                        TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
          op0 = target;
        }

      return REDUCE_BIT_FIELD (op0);
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
         Otherwise, if neither mode is BLKmode and both are integral and within
         a word, we can use gen_lowpart.  If neither is true, make sure the
         operand is in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
        ;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
               && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
               && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
               && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
               && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
        op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (!MEM_P (op0))
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

          gcc_assert (!TREE_ADDRESSABLE (exp));

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* At this point, OP0 is in the correct mode.  If the output type is such
         that the operand is known to be aligned, indicate that it is.
         Otherwise, we need only be concerned about alignment for non-BLKmode
         results.  */
      if (MEM_P (op0))
        {
          op0 = copy_rtx (op0);

          if (TYPE_ALIGN_OK (type))
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              HOST_WIDE_INT temp_size
                = MAX (int_size_in_bytes (inner_type),
                       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
                                                    temp_size, 0, type);
              rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

              gcc_assert (!TREE_ADDRESSABLE (exp));

              if (GET_MODE (op0) == BLKmode)
                emit_block_move (new_with_op0_mode, op0,
                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
              else
                emit_move_insn (new_with_op0_mode, op0);

              op0 = new;
            }

          op0 = adjust_address (op0, TYPE_MODE (type), 0);
        }

      return op0;
    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
          && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (modifier == EXPAND_STACK_PARM)
            target = 0;
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return REDUCE_BIT_FIELD (op1);
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 (modifier == EXPAND_INITIALIZER
                                  ? EXPAND_INITIALIZER : EXPAND_SUM));
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Return a PLUS if modifier says it's OK.  */
                  if (modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    return simplify_gen_binary (PLUS, mode, op0, op1);
                  goto binop2;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return REDUCE_BIT_FIELD (op0);
            }
        }
      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        {
          expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                           subtarget, &op0, &op1, 0);
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          goto binop2;
        }

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                           NULL_RTX, &op0, &op1, modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
          else
            return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
        {
          op1 = negate_rtx (mode, op1);
          return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
        }

      goto binop2;
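/* Illustrative example (not from the original source): with a constant
   subtrahend, "p - 4" is rewritten above as "p + (-4)", letting
   plus_constant and the PLUS path fold the offset into surrounding
   address arithmetic.  */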
    case MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && host_integerp (TREE_OPERAND (exp, 1), 0))
        {
          tree exp1 = TREE_OPERAND (exp, 1);

          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          if (!REG_P (op0))
            op0 = force_operand (op0, NULL_RTX);
          if (!REG_P (op0))
            op0 = copy_to_mode_reg (mode, op0);

          return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
                               gen_int_mode (tree_low_cst (exp1, 0),
                                             TYPE_MODE (TREE_TYPE (exp1)))));
        }

      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE
                                   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   == TYPE_PRECISION (TREE_TYPE
                                      (TREE_OPERAND
                                       (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TYPE_UNSIGNED (TREE_TYPE
                                  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   == TYPE_UNSIGNED (TREE_TYPE
                                     (TREE_OPERAND
                                      (TREE_OPERAND (exp, 0), 0)))))))
        {
          tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
          enum machine_mode innermode = TYPE_MODE (op0type);
          bool zextend_p = TYPE_UNSIGNED (op0type);
          optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
          this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

          if (mode == GET_MODE_2XWIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1),
                                     NULL_RTX, &op0, &op1, 0);
                  else
                    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                     NULL_RTX, &op0, &op1, 0);
                  goto binop3;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem, hipart;

                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_expr (TREE_OPERAND (exp, 1),
                                                      NULL_RTX, VOIDmode, 0),
                                         unsignedp);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  hipart = gen_highpart (innermode, temp);
                  htem = expand_mult_highpart_adjust (innermode, hipart,
                                                      op0, op1, hipart,
                                                      zextend_p);
                  if (htem != hipart)
                    emit_move_insn (hipart, htem);
                  return REDUCE_BIT_FIELD (temp);
                }
            }
        }
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      gcc_unreachable ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode,
                          optab_for_tree_code (NEGATE_EXPR, type),
                          op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

#ifdef HAVE_conditional_move
      /* Use a conditional move if possible.  */
      if (can_conditionally_move_p (mode))
	{
	  enum rtx_code comparison_code;
	  rtx insn;

	  if (code == MAX_EXPR)
	    comparison_code = unsignedp ? GEU : GE;
	  else
	    comparison_code = unsignedp ? LEU : LE;

	  /* ??? Same problem as in expmed.c: emit_conditional_move
	     forces a stack adjustment via compare_from_rtx, and we
	     lose the stack adjustment if the sequence we are about
	     to create is discarded.  */
	  do_pending_stack_adjust ();

	  start_sequence ();

	  /* Try to emit the conditional move.  */
	  insn = emit_conditional_move (target, comparison_code,
					op0, op1, mode,
					op0, op1, mode,
					unsignedp);

	  /* If we could do the conditional move, emit the sequence,
	     and return.  */
	  if (insn)
	    {
	      rtx seq = get_insns ();
	      end_sequence ();
	      emit_insn (seq);
	      return target;
	    }

	  /* Otherwise discard the sequence and fall back to code with
	     branches.  */
	  end_sequence ();
	}
#endif
      if (target != op0)
	emit_move_insn (target, op0);

      temp = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
					  NULL_RTX, temp);
	  else
	    do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
					  NULL_RTX, temp);
	}
      else
	do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);

      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */

      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
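
      /* For example, "r = (x != 0);" first tries a store-flag insn via
	 do_store_flag; the drop-through code above is the generic fallback

	     r = 0;
	     if (! (x != 0))
	       goto lab;
	     r = 1;
	   lab:

	 i.e. a set/jump/set sequence built around a conditional jump.  */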
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary and see if we can copy the value there.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;
    case MODIFY_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs);

	return const0_rtx;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return read_complex_part (op0, true);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;
    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
	this_optab = optab_for_tree_code (code, type);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (exp, target);
	return target;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD

/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
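
/* Worked example: for a 3-bit unsigned bit-field type in SImode,
   prec == 3, so the mask computed above is (1 << 3) - 1 == 7 and a
   single AND suffices.  For the signed case the value is shifted left
   by GET_MODE_BITSIZE (SImode) - 3 == 29 bits and arithmetically
   shifted back, which replicates bit 2 through the upper bits as a
   sign bit.  */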

/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
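
/* The shape recognized above is the usual align-upward computation:
   for an alignment N (a power of 2 above BIGGEST_ALIGNMENT), an offset
   of the form

	(-(sizetype) &EXP) & (N - 1)

   added to the address of EXP rounds that address up to a multiple of
   N, so the sum is known to be N-aligned.  */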

/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* The variable must be read-only, non-volatile and locally bound.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be
	 constant and inside of the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
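
/* Usage examples: for an argument that is an ADDR_EXPR of a STRING_CST
   plus a variable offset I (roughly &"abc"[0] + i), the PLUS_EXPR arm
   returns the STRING_CST with *PTR_OFFSET set to I.  For &buf[3],
   where buf is a read-only, locally bound array initialized from a
   string literal, the ARRAY_REF arm returns DECL_INITIAL (buf) with
   *PTR_OFFSET set to 3, once the size and bounds checks above pass.  */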

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
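
/* For instance, on a target with a store-flag (scc) pattern the
   comparison "r = (x < y)" can come out as a single insn, conceptually

	(set (reg:SI r) (lt:SI (reg:SI x) (reg:SI y)))

   whereas the tail above synthesizes the same value with an initial
   move, a conditional jump and a second move when emit_store_flag
   cannot handle the case.  */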

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
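
/* Example: a target that defines DImode arithmetic but has no V2DImode
   vector unit still gets a nonzero answer for V2DImode here, because a
   two-element DImode vector can be emulated with a pair of DIs.  */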

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
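
/* Example: a V4SImode VECTOR_CST listing only the elements 1, 2 and 3
   produces a CONST_VECTOR whose first three elements are CONST_INTs
   and whose fourth is CONST0_RTX (SImode), courtesy of the padding
   loop above.  */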

#include "gt-expr.h"