/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
25 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
45 #include "typeclass.h"
48 #include "langhooks.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to;
  rtx from, from_addr;
  int autinc_from, explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
					HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
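/* With this default, an unaligned access counts as slow exactly on
   strict-alignment targets, so the by-pieces code below caps the piece
   mode at the known alignment there.  */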
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat, mem, mem1, reg;
  enum machine_mode mode;
  int num_clobbers;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	       && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
#ifdef HAVE_slt
      else if (HAVE_slt
	       && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	       && STORE_FLAG_VALUE == -1)
	{
	  emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			 lowpart_mode, 0);
	  fill_value = gen_reg_rtx (word_mode);
	  emit_insn (gen_slt (fill_value));
	}
#endif
      else
	{
	  fill_value
	    = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
			    size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
			    NULL_RTX, 0);
	  fill_value = convert_to_mode (word_mode, fill_value, 1);
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
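/* In summary, the cases above are tried in order: identical modes,
   vector and CONCAT decomposition, floating-point conversion (by insn
   or libcall), MODE_PARTIAL_INT bridging, multi-word widening or
   narrowing, single-word truncation and extension, special truncate
   insns, and finally lowpart truncation through a register.  */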
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
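/* Illustrative use (not from the original source): to widen an
   unsigned QImode pseudo NARROW to SImode one would write

     rtx wide = convert_to_mode (SImode, narrow, 1);

   where the nonzero UNSIGNEDP selects zero-extension.  */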
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent to
     a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
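/* For example, on a host where HOST_WIDE_INT is 64 bits the immediate
   limit is 2 * 8 = 16 bytes, so STORE_MAX_PIECES equals MOVE_MAX_PIECES
   on any target whose MOVE_MAX_PIECES is 16 or less.  */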
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
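/* For example, moving 15 bytes with word alignment on a target whose
   widest piece is 4 bytes takes 3 SImode moves, 1 HImode move and
   1 QImode move, so this returns 5.  */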
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
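/* Illustrative use (not from the original source): copying SIZE bytes
   between two BLKmode MEMs with the default strategy is

     emit_block_move (dst_mem, src_mem, size_rtx, BLOCK_OP_NORMAL);

   which tries move_by_pieces, a movmem pattern, and the memcpy
   libcall, in that order.  */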
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align,
			    HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
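/* Such a group might look like (an illustrative sketch)

     (parallel [(expr_list (reg:DI 10) (const_int 0))
		(expr_list (reg:DI 11) (const_int 8))])

   where each element pairs a register with its byte offset within
   the value.  */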
rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = alloca (sizeof (rtx) * finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
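
/* Usage sketch for copy_blkmode_from_reg (hypothetical; the helper name
   below is invented for illustration and assumes the declarations already
   in scope in this file): expanding the value of a call that returns a
   small BLKmode struct in registers.  */
#if 0
static rtx
expand_blkmode_return_value (rtx value_reg, tree type, rtx target)
{
  /* TARGET may be zero; copy_blkmode_from_reg then allocates a stack
     temporary of TYPE and returns it as a MEM.  */
  return copy_blkmode_from_reg (target, value_reg, type);
}
#endif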
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
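
/* Usage sketch for the use_reg/use_regs/use_group_regs family (hypothetical
   helper, mirroring the kind of bookkeeping done during call expansion):
   record the registers that carry an argument so the CALL_INSN's
   function-usage list keeps them live.  */
#if 0
static void
note_arg_registers (rtx *call_fusage, rtx reg)
{
  if (GET_CODE (reg) == PARALLEL)
    /* Argument split over non-contiguous hard registers.  */
    use_group_regs (call_fusage, reg);
  else if (REG_P (reg))
    /* Argument in a single hard register.  */
    use_reg (call_fusage, reg);
}
#endif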
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
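
/* Usage sketch for can_store_by_pieces/store_by_pieces (hypothetical; the
   callback name is invented, in the style of the read_str callbacks in
   builtins.c, and assumes c_readstr as declared in expr.h): a CONSTFUN
   that yields a constant made of one repeated byte.  */
#if 0
static rtx
repeated_byte_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			enum machine_mode mode)
{
  char buf[MOVE_MAX];
  unsigned int i;

  /* Every piece looks the same, whatever the offset and mode.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    buf[i] = *(const char *) data;
  return c_readstr (buf, mode);
}

/* A caller would then test and expand roughly as:
     if (can_store_by_pieces (len, repeated_byte_read_str, &c, align))
       store_by_pieces (to, len, repeated_byte_read_str, &c, align, 0);  */
#endif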
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
		   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
		     unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (GET_CODE (size) == CONST_INT
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
				   expected_align, expected_size))
    ;
  else
    return set_storage_via_libcall (object, size, const0_rtx,
				    method == BLOCK_OP_TAILCALL);

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  return clear_storage_hints (object, size, method, 0, -1);
}
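
/* Usage sketch for clear_storage (hypothetical fragment; OBJECT is assumed
   to be a BLKmode MEM and TYPE its tree type): zeroing an object whole, as
   is done for example when expanding an empty CONSTRUCTOR.  */
#if 0
  clear_storage (object, GEN_INT (int_size_in_bytes (type)),
		 BLOCK_OP_NORMAL);
#endif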
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into new pseudos into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (GET_CODE (val) != CONST_INT)
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3,
			       object_tree, integer_zero_node, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
			unsigned int expected_align,
			HOST_WIDE_INT expected_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = setmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than
	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
	     the mode mask, as it is returned by the macro, it will
	     definitely be less than the actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (object, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx opsize, opchar;
	  enum machine_mode char_mode;
	  rtx last = get_last_insn ();
	  rtx pat;

	  opsize = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[1].predicate;
	  if (pred != 0 && ! (*pred) (opsize, mode))
	    opsize = copy_to_mode_reg (mode, opsize);

	  opchar = val;
	  char_mode = insn_data[(int) code].operand[2].mode;
	  if (char_mode != VOIDmode)
	    {
	      opchar = convert_to_mode (char_mode, opchar, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (opchar, char_mode))
		opchar = copy_to_mode_reg (char_mode, opchar);
	    }

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
	  else
	    pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
					GEN_INT (expected_align),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  return false;
}
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
	{
	  emit_move_insn (part, val);
	  return;
	}
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
	return ret;
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, NULL_RTX, imode, imode);
}
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
		       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = mov_optab->handlers[imode].insn_code;
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
	val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
			      GEN_INT (adjust), stack_pointer_rtx,
			      0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
		  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			 read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

static rtx
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	   && (!STRICT_ALIGNMENT
	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
	 existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
	{
	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
			   BLOCK_OP_NO_LIBCALL);
	  return get_last_insn ();
	}

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
	return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = mov_optab->handlers[CCmode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
			- GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }
  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;
  return false;
}
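
/* Worked sketch for undefined_operand_subword_p (hypothetical; assumes a
   little-endian target with 4-byte words): word 1 of a paradoxical
   (subreg:DI (reg:SI ...) 0) lies entirely in undefined bits, so
   emit_move_multi_word may skip the move for that word.  */
#if 0
  rtx paradoxical = gen_rtx_SUBREG (DImode, gen_reg_rtx (SImode), 0);
  gcc_assert (undefined_operand_subword_p (paradoxical, 1));
#endif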
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
	 from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
	continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
	 constant.  Otherwise, force it into a register.  Then we must
	 be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
	{
	  y = use_anchored_address (force_const_mem (mode, y));
	  ypart = operand_subword (y, i, 1, mode);
	}
      else if (ypart == 0)
	ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = mov_optab->handlers[mode].insn_code;
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
	return result;
      else
	return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, false);
      if (ret)
	return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	  else
	    y = use_anchored_address (y);
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (MEM_P (y)
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
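
/* Usage sketch for emit_move_insn (hypothetical fragment): the canonical
   way to copy between two rtxen of the same mode; constants are
   legitimized automatically.  */
#if 0
  rtx target = gen_reg_rtx (SImode);
  rtx tmp = gen_reg_rtx (SImode);
  emit_move_insn (tmp, GEN_INT (42));	/* constant load */
  emit_move_insn (target, tmp);		/* register-to-register copy */
#endif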
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (LEGITIMATE_CONSTANT_P (y))
    oldcost = rtx_cost (y, SET);
  else
    oldcost = rtx_cost (force_const_mem (dstmode, y), SET);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
	    continue;
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
	  if (oldcost < newcost)
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	{
	  trunc_y = force_const_mem (srcmode, trunc_y);
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
	  if (oldcost < newcost)
	    continue;
	  trunc_y = validize_mem (trunc_y);
	}
      else
	continue;

      /* For CSE's benefit, force the compressed constant pool entry
	 into a new pseudo.  This constant may be used in different modes,
	 and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
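
/* Usage sketch for push_block (hypothetical fragment; EXP is assumed to be
   a BLKmode tree whose rtx lives in SRC_MEM): reserve outgoing-argument
   space and copy a block into it.  */
#if 0
  rtx size = expr_size (exp);
  rtx block_addr = push_block (size, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, block_addr), src_mem, size,
		   BLOCK_OP_CALL_PARM);
#endif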
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = GEN_INT (GET_MODE_SIZE (mode));
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 0, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 enum machine_mode mode1, rtx str_rtx,
				 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  if (!BINARY_CLASS_P (src)
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
				MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
	return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (TREE_CODE (src))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if (bitpos + bitsize != str_bitsize
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, 0);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
			    build_int_cst (NULL_TREE, bitpos),
			    NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
      value = convert_modes (GET_MODE (str_rtx),
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
	{
	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
			      - 1);
	  value = expand_and (GET_MODE (str_rtx), value, mask,
			      NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
			    build_int_cst (NULL_TREE, bitpos),
			    NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
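
/* Illustrative example (ours, not from the original sources): given

     struct bits { unsigned int ok : 1; unsigned int rest : 31; } b;
     void set_ok (void) { b.ok |= 1; }

   the assignment reaches optimize_bitfield_assignment_op with
   BITSIZE == 1 and TREE_CODE (src) == BIT_IOR_EXPR, and is expanded
   as a mask, shift and IOR on the word containing the field rather
   than an extract/modify/insert sequence.  */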
/* Expand an assignment that stores the value of FROM into TO.  */

void
expand_assignment (tree to, tree from)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
  if (handled_component_p (to)
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, true);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      to_rtx = expand_normal (tem);

      if (offset != 0)
	{
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (offset_rtx) != Pmode)
	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	  if (GET_MODE (offset_rtx) != ptr_mode)
	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (MEM_P (to_rtx)
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* Handle expand_expr of a complex value returning a CONCAT.  */
      if (GET_CODE (to_rtx) == CONCAT)
	{
	  if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
	    {
	      gcc_assert (bitpos == 0);
	      result = store_expr (from, to_rtx, false);
	    }
	  else
	    {
	      gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
	      result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);

	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);

	      /* Deal with volatile and readonly fields.  The former is only
		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	      if (component_uses_parent_alias_set (to))
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
					       to_rtx, to, from))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos, mode1, from,
				  TREE_TYPE (tem), get_alias_set (to));
	}

      if (result)
	preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && REG_P (DECL_RTL (to))))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address (GET_MODE (to_rtx), value);
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TYPE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return;
}
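
/* Illustrative example (ours, not from the original sources): an
   assignment such as

     struct { int v[4]; } *p;
     void f (int i, int x) { p->v[i] = x; }

   takes the handled_component_p path above: get_inner_reference
   decomposes the left-hand side into a base object plus a bit position
   and a variable offset, and the store is then carried out by
   store_field (or by optimize_bitfield_assignment_op when that
   shortcut applies).  */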
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */

rtx
store_expr (tree exp, rtx target, int call_param_p)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  int dont_return_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other. Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && (!lang_hooks.reduce_bit_field_operations
	      || (GET_MODE_PRECISION (GET_MODE (target))
		  == TYPE_PRECISION (TREE_TYPE (exp)))))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = fold_convert
	      (lang_hooks.types.signed_or_unsigned_type
	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

	  exp = fold_convert (lang_hooks.types.type_for_mode
			      (GET_MODE (SUBREG_REG (target)),
			       SUBREG_PROMOTED_UNSIGNED_P (target)),
			      exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && REG_P (target)
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && CONSTANT_P (temp))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, TREE_TYPE (exp),
			 int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
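
/* Illustrative example (ours, not from the original sources): for

     char buf[8] = "hi";

   the STRING_CST branch above copies just the string's 3 bytes
   (including the terminating nul) with emit_block_move, then uses
   clear_storage to zero the remaining 5 bytes of the array.  */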
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_elt_count,
			    bool *p_must_clear)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, elt_count;
  tree value, purpose;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  elt_count = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
	    mult = (tree_low_cst (hi_index, 1)
		    - tree_low_cst (lo_index, 1) + 1);
	}

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, ic = 0;

	    bool const_elt_p
	      = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);

	    nz_elts += mult * nz;
	    elt_count += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  elt_count += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case VECTOR_CST:
	  {
	    tree v;
	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
	      {
		if (!initializer_zerop (TREE_VALUE (v)))
		  nz_elts += mult;
		elt_count += mult;
	      }
	  }
	  break;

	default:
	  nz_elts += mult;
	  elt_count += mult;

	  if (const_from_elts_p && const_p)
	    const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
		      != NULL_TREE;
	  break;
	}
    }

  if (!*p_must_clear
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
    {
      tree init_sub_type;
      bool clear_this = true;

      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
	{
	  /* We don't expect more than one element of the union to be
	     initialized.  Not sure what we should do otherwise... */
	  gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
		      == 1);

	  init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
						CONSTRUCTOR_ELTS (ctor),
						0)->value);

	  /* ??? We could look at each element of the union, and find the
	     largest element.  Which would avoid comparing the size of the
	     initialized element against any tail padding in the union.
	     Doesn't seem worth the effort...  */
	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
				TYPE_SIZE (init_sub_type)) == 1)
	    {
	      /* And now we have to find out if the element itself is fully
		 constructed.  E.g. for union { struct { int a, b; } s; } u
		 = { .s = { .a = 1 } }.  */
	      if (elt_count == count_type_elements (init_sub_type, false))
		clear_this = false;
	    }
	}

      *p_must_clear = clear_this;
    }

  *p_nz_elts += nz_elts;
  *p_elt_count += elt_count;

  return const_p;
}

/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * if a type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *p_must_clear.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_elt_count,
			  bool *p_must_clear)
{
  *p_nz_elts = 0;
  *p_elt_count = 0;
  *p_must_clear = false;

  return
    categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
}
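
/* Illustrative example (ours, not from the original sources): for the
   initializer

     struct { int a, b, c; } s = { 1, 0, 2 };

   categorize_ctor_elements returns *P_NZ_ELTS == 2 (the two nonzero
   scalars) and *P_ELT_COUNT == 3 (three scalars in total), and leaves
   *P_MUST_CLEAR false since the type is not a union.  */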
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
   array member at the end of the structure.  */

HOST_WIDE_INT
count_type_elements (tree type, bool allow_flexarr)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree telts = array_type_nelts (type);
	if (telts && host_integerp (telts, 1))
	  {
	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
	    if (n == 0)
	      return 0;
	    else if (max / n > m)
	      return n * m;
	  }
	return -1;
      }

    case RECORD_TYPE:
      {
	HOST_WIDE_INT n = 0, t;
	tree f;

	for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      t = count_type_elements (TREE_TYPE (f), false);
	      if (t < 0)
		{
		  /* Check for structures with flexible array member.  */
		  tree tf = TREE_TYPE (f);
		  if (allow_flexarr
		      && TREE_CHAIN (f) == NULL
		      && TREE_CODE (tf) == ARRAY_TYPE
		      && TYPE_DOMAIN (tf)
		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
		      && int_size_in_bytes (type) >= 0)
		    break;

		  return -1;
		}
	      n += t;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	/* Ho hum.  How in the world do we guess here?  Clearly it isn't
	   right to count the fields.  Guess based on the number of words.  */
	HOST_WIDE_INT n = int_size_in_bytes (type);
	if (n < 0)
	  return -1;
	return n / UNITS_PER_WORD;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
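
/* Illustrative example (ours, not from the original sources):
   count_type_elements returns 8 for "int[4][2]" (4 * 2 scalars), 2 for
   a complex type, and -1 for a variable-length array, whose element
   count is not a compile-time constant.  */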
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      if (must_clear)
	return 1;

      elts = count_type_elements (TREE_TYPE (exp), false);

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}

/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
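
/* Illustrative example (ours, not from the original sources): for

     int a[100] = { [0] = 7, [1] = 9 };

   mostly_zeros_p returns 1, since 2 nonzero elements out of 100 is
   below the 1/4 threshold, while all_zeros_p returns 0 because
   nz_elts is 2.  */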
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
}
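
/* Illustrative example (ours, not from the original sources): when
   expanding

     struct inner { int x, y; };
     struct outer { struct inner i; int pad[6]; } o = { { 0, 5 } };

   the outer initializer has missing fields, so TARGET is cleared
   first; the nested CONSTRUCTOR for "o.i" starts on a byte boundary,
   so the shortcut above recurses into store_constructor with CLEARED
   set, and only the single nonzero store for "o.i.y" is emitted.  */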
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

        /* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
		      != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (! cleared)
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (host_integerp (DECL_SIZE (field), 1))
	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
	    else
	      bitsize = -1;

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (host_integerp (offset, 0)
		&& host_integerp (bit_position (field), 0))
	      {
		bitpos = int_bit_position (field);
		offset = 0;
	      }
	    else
	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	    if (offset)
	      {
		rtx offset_rtx;

		offset
		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						    make_tree (TREE_TYPE (exp),
							       target));

		offset_rtx = expand_normal (offset);
		gcc_assert (MEM_P (to_rtx));

#ifdef POINTERS_EXTEND_UNSIGNED
		if (GET_MODE (offset_rtx) != Pmode)
		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
		if (GET_MODE (offset_rtx) != ptr_mode)
		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

		to_rtx = offset_address (to_rtx, offset_rtx,
					 highest_pow2_factor (offset));
	      }

#ifdef WORD_REGISTER_OPERATIONS
	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_size
		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }
#endif

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
				     value, type, cleared,
				     get_alias_set (TREE_TYPE (field)));
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
			  && host_integerp (TYPE_MAX_VALUE (domain), 0));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! host_integerp (lo_index, 1)
			|| ! host_integerp (hi_index, 1))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_low_cst (hi_index, 1)
				       - tree_low_cst (lo_index, 1) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    int unsignedp;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    unsignedp = TYPE_UNSIGNED (elttype);
	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && host_integerp (lo_index, 0)
		    && host_integerp (hi_index, 0)
		    && (lo = tree_low_cst (lo_index, 0),
			hi = tree_low_cst (hi_index, 0),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (host_integerp (TYPE_SIZE (elttype), 1)
			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, type, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx loop_start = gen_label_rtx ();
		    rtx loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);
		    unsignedp = TYPE_UNSIGNED (domain);

		    index = build_decl (VAR_DECL, NULL_TREE, domain);

		    index_r
		      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						   &unsignedp, 0));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node));

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! host_integerp (index, 0))
		     || ! host_integerp (TYPE_SIZE (elttype), 1))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_low_cst (index, 0) - minelt)
			    * tree_low_cst (TYPE_SIZE (elttype), 1));
		else
		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 type, cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = 0;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) vec_init_optab->handlers[mode].insn_code;
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_low_cst
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype), 0), 1);

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_low_cst (ce->index, 1);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* Vector CONSTRUCTORs should only be built from smaller
		   vectors in the case of BLKmode vectors.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos,
					 value_mode, value, type,
					 cleared, get_alias_set (elttype));
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
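
/* Illustrative example (ours, not from the original sources): for

     int a[16] = { [4] = 1, [5] = 2 };

   the ARRAY_TYPE case above notices the missing elements, clears the
   whole array with clear_storage, and then emits just the two element
   stores.  A GNU C range index such as "[0 ... 3] = v" is either
   unrolled when the range is small and constant, or expanded as the
   runtime loop built around loop_start/loop_end above.  */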
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     enum machine_mode mode, tree exp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      if (TREE_CODE (exp) == NOP_EXPR)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = TREE_OPERAND (exp, 0);
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && !(bitpos % BITS_PER_UNIT));

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0);
    }
}
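
/* Illustrative example (ours, not from the original sources): storing
   to a misaligned field, e.g.

     struct __attribute__ ((packed)) P { char c; int i; } *p;
     void g (int v) { p->i = v; }

   fails the alignment checks above on a SLOW_UNALIGNED_ACCESS target,
   so the value is written with store_bit_field instead of an ordinary
   memory reference.  */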
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);

      /* For vector types, with the correct size of access, use the mode of
	 inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
				     DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   bitsize_int (*pbitsize));
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR,
				 fold_convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
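
/* Illustrative example (ours, not from the original sources): for the
   reference "s.a[i].f", get_inner_reference peels the COMPONENT_REF
   and ARRAY_REF nodes down to the declaration "s", accumulating the
   constant part of the position in *PBITPOS and returning the
   "i"-dependent part as a byte-offset tree in *POFFSET.  */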
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert (sizetype, aligned_size);
      return size_binop (MULT_EXPR, aligned_size,
			 size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert (sizetype, aligned_offset);
      return size_binop (MULT_EXPR, aligned_offset,
			 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return 1;

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }

  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to
     be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
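
/* As an illustrative sketch (register numbers and modes are hypothetical):
   given VALUE = (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61)),
   force_operand emits a multiply and an add insn and returns a fresh
   pseudo such as (reg:SI 62) holding the sum, so the caller can use the
   result anywhere a simple operand is required.  */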
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  for (idx = 0;
	       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
	       idx++)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;

      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !lang_hooks.safe_from_p (x, exp))
	return 0;
      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();

    case tcc_gimple_stmt:
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
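
/* For instance, when expanding an assignment like `*p = f ()', the store
   target is a MEM that the call on the right-hand side might reference;
   the CALL_EXPR case above conservatively returns 0 for any MEM or hard
   register target, which makes the caller evaluate the right-hand side
   into a fresh temporary first.  */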
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
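
/* A worked example: for EXP = i * 12 + 4, the MULT_EXPR contributes
   highest_pow2_factor (i) * highest_pow2_factor (12) = 1 * 4 = 4, and the
   PLUS_EXPR then takes MIN (4, highest_pow2_factor (4)) = MIN (4, 4) = 4,
   so an offset of this form is known to be a multiple of 4.  */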
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
  else
    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
  return MAX (factor, target_align);
}
/* Return &VAR expression for emulated thread local VAR.  */

static tree
emutls_var_address (tree var)
{
  tree emuvar = emutls_decl (var);
  tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
  tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
  tree arglist = build_tree_list (NULL_TREE, arg);
  tree call = build_function_call_expr (fn, arglist);
  return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
}
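
/* Conceptually, for a source-level `__thread int t;' this builds the tree
   for something like `(int *) __builtin___emutls_get_address (&control)',
   where `control' stands for the emulated-TLS control object that
   emutls_decl creates for `t' (its exact name is up to emutls_decl).  */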
/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
	/* Should be ignored.  */;
      else if (lang_hooks.expand_decl (var))
	/* OK.  */;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
	expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
	rest_of_decl_compilation (var, 0, 0);
      else
	/* No expansion needed.  */
	gcc_assert (TREE_CODE (var) == TYPE_DECL
		    || TREE_CODE (var) == CONST_DECL
		    || TREE_CODE (var) == FUNCTION_DECL
		    || TREE_CODE (var) == LABEL_DECL);
    }
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
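
/* For example, when expanding `a[i] + a[i]' the two operand trees compare
   operand_equal_p, so the load is emitted once and *OP1 is simply a
   copy_rtx of *OP0.  When flag_evaluation_order is set and EXP1 has side
   effects, EXP0 is wrapped in a SAVE_EXPR first so that its value cannot
   be clobbered while EXP1 is being expanded.  */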
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      || CONSTANT_CLASS_P (exp))
    return XEXP (expand_expr_constant (exp, 0, modifier), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case CONST_DECL:
      /* Recurse and make the output_constant_def clause above handle this.  */
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
				      tmode, modifier);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case VAR_DECL:
      /* TLS emulation hook - replace __thread VAR's &VAR with
	 __emutls_get_address (&_emutls.VAR).  */
      if (! targetm.have_tls
	  && TREE_CODE (exp) == VAR_DECL
	  && DECL_THREAD_LOCAL_P (exp))
	{
	  exp = emutls_var_address (exp);
	  return expand_expr (exp, target, tmode, modifier);
	}
      /* Fall through.  */

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Assume language
	 specific tree nodes can be expanded in some interesting way.  */
      if (DECL_P (exp)
	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */
	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
	    {
	      assemble_external (exp);
	      TREE_USED (exp) = 1;
	    }

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);

      result = convert_memory_address (tmode, result);
      tmp = convert_memory_address (tmode, tmp);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = gen_rtx_PLUS (tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
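
/* To make the recursion concrete: for `&__imag__ c' where `c' is a
   _Complex double, the IMAGPART_EXPR case sets bitpos to the size of a
   double in bits, the recursive call yields the address of `c', and the
   bitpos handling then adds bitpos / BITS_PER_UNIT bytes (8 on a typical
   64-bit-double target) to form the final address.  */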
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address (tmode, result);

  return result;
}
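
/* E.g. for `(short) &a' the type of the ADDR_EXPR may suggest an HImode
   TMODE; since HImode is neither Pmode nor ptr_mode, the code above falls
   back to Pmode, computes the address there, and lets the enclosing
   conversion narrow the result afterwards.  */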
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */

static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);
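
/* As an illustration of the EXPAND_SUM contract: expanding `&arr[i]' with
   EXPAND_SUM may legitimately return an un-emitted form such as
   (plus (mult (reg) (const_int 4)) (symbol_ref "arr")), so that the caller
   can fold the whole sum into a single addressing mode instead of
   materializing it with add and mult insns.  */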
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						REG_NOTES (insn));
	}
    }

  return ret;
}
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context, subexp0, subexp1;
  bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))
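
/* For instance, on a target where a 3-bit bit-field type is given SImode,
   arithmetic carried out in SImode can leave garbage in the high-order
   bits; when reduce_bit_field is set, REDUCE_BIT_FIELD re-truncates the
   result to the 3-bit precision of the type before it is returned.  */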
  if (GIMPLE_STMT_P (exp))
    {
      type = void_type_node;
      mode = VOIDmode;
      unsignedp = 0;
    }
  else
    {
      type = TREE_TYPE (exp);
      mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
    }
  if (lang_hooks.reduce_bit_field_operations
      && TREE_CODE (type) == INTEGER_TYPE
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
    {
      /* An operation in what may be a bit-field type needs the
	 result to be reduced to the precision of the bit-field type,
	 which is narrower than that of the type's mode.  */
      reduce_bit_field = true;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
    }

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == COND_EXPR
		 || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == COMPONENT_REF || code == INDIRECT_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }

    case SSA_NAME:
      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
				 NULL);

    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* TLS emulation hook - replace __thread vars with
	 *__emutls_get_address (&_emutls.var).  */
      if (! targetm.have_tls
	  && TREE_CODE (exp) == VAR_DECL
	  && DECL_THREAD_LOCAL_P (exp))
	{
	  exp = build_fold_indirect_ref (emutls_var_address (exp));
	  return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
      gcc_assert (decl_rtl);

      /* Ensure the variable is marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (!context
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
		  || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (REG_P (decl_rtl)
	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
	{
	  enum machine_mode pmode;

	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
				(TREE_CODE (exp) == RESULT_DECL
				 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;

    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      {
	tree tmp = NULL_TREE;
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
	  return const_vector_from_tree (exp);
	if (GET_MODE_CLASS (mode) == MODE_INT)
	  {
	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
	    if (type_for_mode)
	      tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
	  }
	if (!tmp)
	  tmp = build_constructor_from_list (type,
					     TREE_VECTOR_CST_ELTS (exp));
	return expand_expr (tmp, ignore ? const0_rtx : target,
			    tmode, modifier);
      }

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (temp, 0))
	      || flag_force_addr))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;

    case SAVE_EXPR:
      {
	tree val = TREE_OPERAND (exp, 0);
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    TREE_OPERAND (exp, 0) = val;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* Try to avoid creating a temporary at all.  This is possible
	 if all of the initializer is zero.
	 FIXME: try to handle all [0..255] initializers we can handle
	 with memset.  */
      else if (TREE_STATIC (exp)
	       && !TREE_ADDRESSABLE (exp)
	       && target != 0 && mode == BLKmode
	       && all_zeros_p (exp))
	{
	  clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	  return target;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || ((modifier == EXPAND_INITIALIZER
		    || modifier == EXPAND_CONST_ADDRESS)
		   && TREE_CONSTANT (exp)))
	{
	  rtx constructor = expand_expr_constant (exp, 1, modifier);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}
    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);

	if (modifier != EXPAND_WRITE)
	  {
	    tree t;

	    t = fold_read_from_constant_string (exp);
	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (code == ALIGN_INDIRECT_REF)
	  {
	    int align = TYPE_ALIGN_UNIT (type);
	    op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
	    op0 = memory_address (mode, op0);
	  }

	temp = gen_rtx_MEM (mode, op0);

	set_mem_attributes (temp, exp, 0);

	/* Resolve the misalignment now, so that we don't have to remember
	   to resolve it later.  Of course, this only works for reads.  */
	/* ??? When we get around to supporting writes, we'll have to handle
	   this in store_expr directly.  The vectorizer isn't generating
	   those yet, however.  */
	if (code == MISALIGNED_INDIRECT_REF)
	  {
	    int icode;
	    rtx reg, insn;

	    gcc_assert (modifier == EXPAND_NORMAL
			|| modifier == EXPAND_STACK_PARM);

	    /* The vectorizer should have already checked the mode.  */
	    icode = movmisalign_optab->handlers[mode].insn_code;
	    gcc_assert (icode != CODE_FOR_nothing);

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail.  */
	    reg = gen_reg_rtx (mode);

	    /* Nor can the insn generator.  */
	    insn = GEN_FCN (icode) (reg, temp);
	    emit_insn (insn);

	    return reg;
	  }

	return temp;
      }

    case TARGET_MEM_REF:
      {
	struct mem_address addr;

	get_address_description (exp, &addr);
	op0 = addr_for_mem_ref (&addr, true);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
      }
      return temp;
    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	  {
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
		  break;
		}
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
		 && targetm.binds_local_p (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    unsigned HOST_WIDE_INT ix;
		    tree field, value;

		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					      field, value)
		      if (tree_int_cst_equal (field, index))
			{
			  if (!TREE_SIDE_EFFECTS (value))
			    return expand_expr (fold (value), target, tmode,
						modifier);
			  break;
			}
		  }
		else if (TREE_CODE (init) == STRING_CST)
		  {
		    tree index1 = index;
		    tree low_bound = array_ref_low_bound (exp);
		    index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));

		    /* Optimize the special case of a zero lower bound.

		       We convert the low_bound to sizetype to avoid some
		       problems with constant folding.  (E.g. suppose the
		       lower bound is 1 and its mode is QI.  Without the
		       conversion, (ARRAY + (INDEX - (unsigned char) 1))
		       becomes ((ARRAY + (-(unsigned char) 1)) + INDEX),
		       which becomes (ARRAY + 255 + INDEX).  Oops!)  */

		    if (! integer_zerop (low_bound))
		      index1 = size_diffop (index1, fold_convert (sizetype,
								  low_bound));

		    if (0 > compare_tree_int (index1,
					      TREE_STRING_LENGTH (init)))
		      {
			tree type = TREE_TYPE (TREE_TYPE (init));
			enum machine_mode mode = TYPE_MODE (type);

			if (GET_MODE_CLASS (mode) == MODE_INT
			    && GET_MODE_SIZE (mode) == 1)
			  return gen_int_mode (TREE_STRING_POINTER (init)
					       [TREE_INT_CST_LOW (index1)],
					       mode);
		      }
		  }
	      }
	  }
      }
      goto normal_inner_ref;
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
				    idx, field, value)
	    if (field == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (field))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		  {
		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_cst (NULL_TREE,
					   GET_MODE_BITSIZE (imode) - bitsize);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep, true);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to be safe to use.  This occurs in unchecked conversion in Ada.  */

	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a legitimate
	   constant, OFFSET is 0, and we won't try to extract outside the
	   register (in case we were passed a partially uninitialized object
	   or a view_conversion to a larger size).  Force the constant to
	   memory otherwise.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0
		&& bitpos + bitsize <= GET_MODE_BITSIZE (mode))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	/* Otherwise, if this object is not in memory and we either have an
	   offset, a BLKmode result, or a reference outside the object, put it
	   there.  Such cases can occur in Ada if we have unchecked conversion
	   of an expression from a scalar type to an array or record type or
	   for an ARRAY_RANGE_REF whose type is BLKmode.  */
	else if (!MEM_P (op0)
		 && (offset != 0
		     || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
	  {
	    tree nt = build_qualified_type (TREE_TYPE (tem),
					    (TYPE_QUALS (TREE_TYPE (tem))
					     | TYPE_QUAL_CONST));
	    rtx memloc = assign_temp (nt, 1, 1, 1);

	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    if (GET_MODE (op0) == BLKmode
		/* A constant address in OP0 can have VOIDmode, we must
		   not try to call force_reg in that case.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one-element arrays having the same mode as their element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    gcc_assert (bitpos == 0
			&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
	    return op0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  It's tempting to make
	       this a constant type, since we know it's only being stored once,
	       but that can cause problems if we are taking the address of this
	       COMPONENT_REF because the MEM of any reference via that address
	       will have flags corresponding to the type, which will not
	       necessarily be constant.  */
	    if (mode == BLKmode)
	      {
		HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
		rtx new;

		/* If the reference doesn't use the alias set of its type,
		   we cannot create the temporary using that type.  */
		if (component_uses_parent_alias_set (exp))
		  {
		    new = assign_stack_local (ext_mode, size, 0);
		    set_mem_alias_set (new, get_alias_set (exp));
		  }
		else
		  new = assign_stack_temp_for_type (ext_mode, size, 0, type);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
	      == BUILT_IN_FRONTEND)
	    return lang_hooks.expand_expr (exp, original_target,
					   tmode, modifier,
					   alt_rtl);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attributes.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM);

	  else
	    {
	      gcc_assert (REG_P (target));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE
						    (TREE_OPERAND (exp, 0)))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			   type, 0);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
				       subreg_lowpart_offset (mode,
							      inner_mode));
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE
					      (TREE_OPERAND (exp, 0))));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (TYPE_MODE (type))
		  == GET_MODE_SIZE (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  op0 = gen_lowpart (TYPE_MODE (type), op0);
	}
      /* If both modes are integral, then we can convert from one to the
	 other.  */
      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
	       && SCALAR_INT_MODE_P (TYPE_MODE (type)))
	op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;
    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
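
      /* Concretely: if strength reduction leaves us with (x + const) + fp,
	 the swap above rewrites it as (fp + const) + x, so that virtual
	 register instantiation can later fold the frame-pointer elimination
	 offset directly into the constant.  */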
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP0, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
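      /* Illustrative sketch (assumed example): under EXPAND_INITIALIZER,
	 "&b - &a" for two file-scope objects can legitimately expand to
	     (minus (symbol_ref "b") (symbol_ref "a"))
	 which the assembler resolves at link time; outside initializers
	 the generic binop path handles the subtraction instead.  */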
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}
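      /* Illustrative sketch (assumed example): when expanding "arr[i]"
	 as an address, the MULT above yields e.g.
	     (mult (reg i) (const_int 4))
	 so the caller can fold it into a scaled-index address such as
	     (plus (symbol_ref "arr") (mult (reg i) (const_int 4)))
	 on targets whose addressing modes support it.  */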
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */

      subexp0 = TREE_OPERAND (exp, 0);
      subexp1 = TREE_OPERAND (exp, 1);

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (subexp0) == NOP_EXPR
	  && TREE_CODE (subexp1) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
	      != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
	  this_optab = usmul_widen_optab;
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
		    expand_operands (TREE_OPERAND (subexp0, 0),
				     TREE_OPERAND (subexp1, 0),
				     NULL_RTX, &op0, &op1, 0);
		  else
		    expand_operands (TREE_OPERAND (subexp0, 0),
				     TREE_OPERAND (subexp1, 0),
				     NULL_RTX, &op1, &op0, 0);

		  goto binop3;
		}
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE
				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_PRECISION (TREE_TYPE
				      (TREE_OPERAND
				       (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TYPE_UNSIGNED (TREE_TYPE
				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_UNSIGNED (TREE_TYPE
				     (TREE_OPERAND
				      (TREE_OPERAND (exp, 0), 0)))))))
	{
	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  goto binop3;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem, hipart;

		  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (TREE_OPERAND (exp, 1)),
					 unsignedp);
		  else
		    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (hipart != htem)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
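      /* Illustrative sketch (assumed example): for
	 "long long r = (long long) a * b" with a and b of type int on a
	 32-bit target, the widening path above multiplies the narrow
	 SImode operands directly into a DImode result (a mulsidi-style
	 pattern), avoiding two explicit sign-extensions followed by a
	 full 64-bit multiply.  */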
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_TRUNC_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    /* ??? Same problem as in expmed.c: emit_conditional_move
	       forces a stack adjustment via compare_from_rtx, and we
	       lose the stack adjustment if the sequence we are about
	       to create is discarded.  */
	    do_pending_stack_adjust ();

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */

      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
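      /* Illustrative sketch of the fallback just above (assumed example):
	 for "x = (a < b);" with no usable set-flag instruction, the
	 emitted sequence is effectively
	     target = 0;  if (!(a < b)) goto L;  target = 1;  L:;
	 i.e. a conditional store built from a move, a jump and a move.  */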
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;
    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;
    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;

    case MODIFY_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	gcc_assert (ignore);
	expand_assignment (lhs, rhs);
	return const0_rtx;
      }
    case GIMPLE_MODIFY_STMT:
      {
	tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
	tree rhs = GIMPLE_STMT_OPERAND (exp, 1);

	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs);
	return const0_rtx;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      op1 = expand_normal (TREE_OPERAND (exp, 1));

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, true);
    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);
    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    case DOT_PROD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (exp, op0, op1, op2,
					    target, unsignedp);
	return target;
      }
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	this_optab = optab_for_tree_code (code, type);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
      {
	expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			 NULL_RTX, &op0, &op1, 0);
	this_optab = optab_for_tree_code (code, type);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			     OPTAB_WIDEN);
	gcc_assert (temp);
	return temp;
      }

    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      {
	expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			 NULL_RTX, &op0, &op1, 0);
	this_optab = optab_for_tree_code (code, type);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			     OPTAB_WIDEN);
	gcc_assert (temp);
	return temp;
      }
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (exp, target);
	return target;
      }
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
	this_optab = optab_for_tree_code (code, type);
	temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
					    target, unsignedp);
	gcc_assert (target);
	return target;
      }
    case VEC_PACK_MOD_EXPR:
    case VEC_PACK_SAT_EXPR:
      {
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	goto binop;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
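/* Worked example for the function above: reducing to a 3-bit unsigned
   bit-field masks with (1 << 3) - 1 = 7, so 13 (0b1101) becomes
   5 (0b101); for a signed field the value is instead shifted left by
   GET_MODE_BITSIZE - 3 and arithmetically shifted back, so 5 (0b101)
   sign-extends from its top field bit and becomes -3.  */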
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
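/* Illustrative sketch (assumed example): the shape recognized above
   arises from user alignment code such as
       q = p + ((-(intptr_t) p) & (ALIGN - 1));
   where ALIGN - 1 is one less than a power of 2 larger than
   BIGGEST_ALIGNMENT, i.e. the offset is (-&EXP) & mask with the
   address of EXP negated under the BIT_AND.  */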
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* They must be read-only, non-volatile and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
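/* Usage sketch for the function above (assumed example): for
   ARG == &"hello"[2] it returns the STRING_CST "hello" and sets
   *PTR_OFFSET to 2; for a variable such as
       static const char msg[] = "hi";
   it can return the initializer of msg instead, with the same
   offset convention.  */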
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];

  if (icode == CODE_FOR_nothing)
    {
      enum machine_mode wmode;

      for (wmode = operand_mode;
	   icode == CODE_FOR_nothing && wmode != VOIDmode;
	   wmode = GET_MODE_WIDER_MODE (wmode))
	icode = cstore_optab->handlers[(int) wmode].insn_code;
    }

  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  label = gen_label_rtx ();
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
			   NULL_RTX, label);

  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
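/* Illustrative sketch of the set/jump/set fallback at the end of the
   function above (assumed example): for "r = (a < b)" it emits, in
   effect,
       r = 1;  if (a < b) goto L;  r = 0;  L:;
   (with the two constants swapped when INVERT is set), which is why a
   target mentioned by the operands must first be copied to a fresh
   register.  */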
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));

  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
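/* Illustrative note: the single unsigned comparison used above relies
   on the classic range-check identity: after subtracting LO, the test
   "I < LO or I > HI" collapses to
       (unsigned) (I - LO) > (unsigned) (HI - LO)
   since a value below LO wraps around to a large unsigned number.  */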
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"