/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int, unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
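
/* Illustrative note (added in this annotated copy, not part of the upstream
   sources): on a typical 32-bit target with word-aligned operands and
   MOVE_MAX_PIECES of 4, an 8-byte copy counts as two SImode moves, so
   MOVE_BY_PIECES_P accepts it whenever MOVE_RATIO is greater than 2.  */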
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
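
/* Illustrative note (added in this annotated copy, not part of the upstream
   sources): a caller widening a QImode value into an SImode pseudo would do
   roughly

	rtx dst = gen_reg_rtx (SImode);
	convert_move (dst, qi_value, 1);

   where the final argument of 1 requests zero-extension; the code below then
   chooses an extend insn, a libcall, or an explicit shift sequence.  */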
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */
349 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
350 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
351 >= GET_MODE_SIZE (to_mode
))
352 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
353 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
355 gcc_assert (GET_CODE (to
) != SUBREG
|| !SUBREG_PROMOTED_VAR_P (to
));
357 if (to_mode
== from_mode
358 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
360 emit_move_insn (to
, from
);
364 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
366 gcc_assert (GET_MODE_BITSIZE (from_mode
) == GET_MODE_BITSIZE (to_mode
));
368 if (VECTOR_MODE_P (to_mode
))
369 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
371 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
373 emit_move_insn (to
, from
);
377 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
379 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
380 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
389 gcc_assert (GET_MODE_PRECISION (from_mode
)
390 != GET_MODE_PRECISION (to_mode
));
392 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
397 /* Try converting directly if the insn is supported. */
399 code
= tab
->handlers
[to_mode
][from_mode
].insn_code
;
400 if (code
!= CODE_FOR_nothing
)
402 emit_unop_insn (code
, to
, from
,
403 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
407 /* Otherwise use a libcall. */
408 libcall
= tab
->handlers
[to_mode
][from_mode
].libfunc
;
410 /* Is this conversion implemented yet? */
411 gcc_assert (libcall
);
414 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
416 insns
= get_insns ();
418 emit_libcall_block (insns
, to
, value
,
419 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
421 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
425 /* Handle pointer conversion. */ /* SPEE 900220. */
426 /* Targets are expected to provide conversion insns between PxImode and
427 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
428 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
433 gcc_assert (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
434 != CODE_FOR_nothing
);
436 if (full_mode
!= from_mode
)
437 from
= convert_to_mode (full_mode
, from
, unsignedp
);
438 emit_unop_insn (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
,
442 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
444 enum machine_mode full_mode
445 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
447 gcc_assert (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
448 != CODE_FOR_nothing
);
450 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
452 if (to_mode
== full_mode
)
455 /* else proceed to integer conversions below. */
456 from_mode
= full_mode
;
459 /* Now both modes are integers. */
461 /* Handle expanding beyond a word. */
462 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
463 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
470 enum machine_mode lowpart_mode
;
471 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
473 /* Try converting directly if the insn is supported. */
474 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
477 /* If FROM is a SUBREG, put it into a register. Do this
478 so that we always generate the same set of insns for
479 better cse'ing; if an intermediate assignment occurred,
480 we won't be doing the operation directly on the SUBREG. */
481 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
482 from
= force_reg (from_mode
, from
);
483 emit_unop_insn (code
, to
, from
, equiv_code
);
486 /* Next, try converting via full word. */
487 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
488 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
489 != CODE_FOR_nothing
))
493 if (reg_overlap_mentioned_p (to
, from
))
494 from
= force_reg (from_mode
, from
);
495 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
497 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
498 emit_unop_insn (code
, to
,
499 gen_lowpart (word_mode
, to
), equiv_code
);
503 /* No special multiword conversion insn; do it by hand. */
506 /* Since we will turn this into a no conflict block, we must ensure
507 that the source does not overlap the target. */
509 if (reg_overlap_mentioned_p (to
, from
))
510 from
= force_reg (from_mode
, from
);
512 /* Get a copy of FROM widened to a word, if necessary. */
513 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
514 lowpart_mode
= word_mode
;
516 lowpart_mode
= from_mode
;
518 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
520 lowpart
= gen_lowpart (lowpart_mode
, to
);
521 emit_move_insn (lowpart
, lowfrom
);
523 /* Compute the value to put in each remaining word. */
525 fill_value
= const0_rtx
;
530 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
531 && STORE_FLAG_VALUE
== -1)
533 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
535 fill_value
= gen_reg_rtx (word_mode
);
536 emit_insn (gen_slt (fill_value
));
542 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
543 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
545 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
549 /* Fill the remaining words. */
550 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
552 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
553 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
555 gcc_assert (subword
);
557 if (fill_value
!= subword
)
558 emit_move_insn (subword
, fill_value
);
561 insns
= get_insns ();
564 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
565 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
569 /* Truncating multi-word to a word or less. */
570 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
571 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
574 && ! MEM_VOLATILE_P (from
)
575 && direct_load
[(int) to_mode
]
576 && ! mode_dependent_address_p (XEXP (from
, 0)))
578 || GET_CODE (from
) == SUBREG
))
579 from
= force_reg (from_mode
, from
);
580 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
584 /* Now follow all the conversions between integers
585 no more than a word long. */
587 /* For truncation, usually we can just refer to FROM in a narrower mode. */
588 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
589 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
590 GET_MODE_BITSIZE (from_mode
)))
593 && ! MEM_VOLATILE_P (from
)
594 && direct_load
[(int) to_mode
]
595 && ! mode_dependent_address_p (XEXP (from
, 0)))
597 || GET_CODE (from
) == SUBREG
))
598 from
= force_reg (from_mode
, from
);
599 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
600 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
601 from
= copy_to_reg (from
);
602 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
606 /* Handle extension. */
607 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
609 /* Convert directly if that works. */
610 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
614 from
= force_not_mem (from
);
616 emit_unop_insn (code
, to
, from
, equiv_code
);
621 enum machine_mode intermediate
;
625 /* Search for a mode to convert via. */
626 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
627 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
628 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
630 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
631 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
632 GET_MODE_BITSIZE (intermediate
))))
633 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
634 != CODE_FOR_nothing
))
636 convert_move (to
, convert_to_mode (intermediate
, from
,
637 unsignedp
), unsignedp
);
641 /* No suitable intermediate mode.
642 Generate what we need with shifts. */
643 shift_amount
= build_int_cst (NULL_TREE
,
644 GET_MODE_BITSIZE (to_mode
)
645 - GET_MODE_BITSIZE (from_mode
));
646 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
647 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
649 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
652 emit_move_insn (to
, tmp
);
657 /* Support special truncate insns for certain modes. */
658 if (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
!= CODE_FOR_nothing
)
660 emit_unop_insn (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
,
665 /* Handle truncation of volatile memrefs, and so on;
666 the things that couldn't be truncated directly,
667 and for which there was no special instruction.
669 ??? Code above formerly short-circuited this, for most integer
670 mode pairs, with a force_reg in from_mode followed by a recursive
671 call to this routine. Appears always to have been wrong. */
672 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
674 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
675 emit_move_insn (to
, temp
);
679 /* Mode combination is not recognized. */
683 /* Return an rtx for a value that would result
684 from converting X to mode MODE.
685 Both X and MODE may be floating, or both integer.
686 UNSIGNEDP is nonzero if X is an unsigned value.
687 This can be done by referring to a part of X in place
688 or by copying to a new temporary with conversion. */
691 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
693 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
696 /* Return an rtx for a value that would result
697 from converting X from mode OLDMODE to mode MODE.
698 Both modes may be floating, or both integer.
699 UNSIGNEDP is nonzero if X is an unsigned value.
701 This can be done by referring to a part of X in place
702 or by copying to a new temporary with conversion.
704 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
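
/* Note (added in this annotated copy): convert_to_mode (MODE, X, UNSIGNEDP)
   above is simply convert_modes (MODE, VOIDmode, X, UNSIGNEDP), i.e. the
   case where the old mode is taken from X itself.  */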
707 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
711 /* If FROM is a SUBREG that indicates that we have already done at least
712 the required extension, strip it. */
714 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
715 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
716 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
717 x
= gen_lowpart (mode
, x
);
719 if (GET_MODE (x
) != VOIDmode
)
720 oldmode
= GET_MODE (x
);
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */
731 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
732 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
733 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
735 HOST_WIDE_INT val
= INTVAL (x
);
737 if (oldmode
!= VOIDmode
738 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
740 int width
= GET_MODE_BITSIZE (oldmode
);
742 /* We need to zero extend VAL. */
743 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
746 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
749 /* We can do this with a gen_lowpart if both desired and current modes
750 are integer, and this is either a constant integer, a register, or a
751 non-volatile MEM. Except for the constant case where MODE is no
752 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
754 if ((GET_CODE (x
) == CONST_INT
755 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
756 || (GET_MODE_CLASS (mode
) == MODE_INT
757 && GET_MODE_CLASS (oldmode
) == MODE_INT
758 && (GET_CODE (x
) == CONST_DOUBLE
759 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
760 && ((MEM_P (x
) && ! MEM_VOLATILE_P (x
)
761 && direct_load
[(int) mode
])
763 && (! HARD_REGISTER_P (x
)
764 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
765 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
766 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
768 /* ?? If we don't know OLDMODE, we have to assume here that
769 X does not need sign- or zero-extension. This may not be
770 the case, but it's the best we can do. */
771 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
772 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
774 HOST_WIDE_INT val
= INTVAL (x
);
775 int width
= GET_MODE_BITSIZE (oldmode
);
777 /* We must sign or zero-extend in this case. Start by
778 zero-extending, then sign extend if we need to. */
779 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
781 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
782 val
|= (HOST_WIDE_INT
) (-1) << width
;
784 return gen_int_mode (val
, mode
);
787 return gen_lowpart (mode
, x
);
790 /* Converting from integer constant into mode is always equivalent to an
792 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
794 gcc_assert (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (oldmode
));
795 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
798 temp
= gen_reg_rtx (mode
);
799 convert_move (temp
, x
, unsignedp
);
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
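
/* Note (added in this annotated copy): with a 64-bit HOST_WIDE_INT this
   evaluates to MIN (MOVE_MAX_PIECES, 16), so the immediate-constant limit
   only matters when MOVE_MAX_PIECES exceeds 16 bytes.  */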
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
846 data
.from_addr
= from_addr
;
849 to_addr
= XEXP (to
, 0);
852 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
853 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
855 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
862 #ifdef STACK_GROWS_DOWNWARD
868 data
.to_addr
= to_addr
;
871 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
872 || GET_CODE (from_addr
) == POST_INC
873 || GET_CODE (from_addr
) == POST_DEC
);
875 data
.explicit_inc_from
= 0;
876 data
.explicit_inc_to
= 0;
877 if (data
.reverse
) data
.offset
= len
;
880 /* If copying requires more than two move insns,
881 copy addresses to registers (to make displacements shorter)
882 and use post-increment if available. */
883 if (!(data
.autinc_from
&& data
.autinc_to
)
884 && move_by_pieces_ninsns (len
, align
, max_size
) > 2)
886 /* Find the mode of the largest move... */
887 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
888 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
889 if (GET_MODE_SIZE (tmode
) < max_size
)
892 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
894 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
895 data
.autinc_from
= 1;
896 data
.explicit_inc_from
= -1;
898 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
900 data
.from_addr
= copy_addr_to_reg (from_addr
);
901 data
.autinc_from
= 1;
902 data
.explicit_inc_from
= 1;
904 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
905 data
.from_addr
= copy_addr_to_reg (from_addr
);
906 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
908 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
910 data
.explicit_inc_to
= -1;
912 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
914 data
.to_addr
= copy_addr_to_reg (to_addr
);
916 data
.explicit_inc_to
= 1;
918 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
919 data
.to_addr
= copy_addr_to_reg (to_addr
);
922 tmode
= mode_for_size (MOVE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
923 if (align
>= GET_MODE_ALIGNMENT (tmode
))
924 align
= GET_MODE_ALIGNMENT (tmode
);
927 enum machine_mode xmode
;
929 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
931 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
932 if (GET_MODE_SIZE (tmode
) > MOVE_MAX_PIECES
933 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
936 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
939 /* First move what we can in the largest integer mode, then go to
940 successively smaller modes. */
944 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
945 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
946 if (GET_MODE_SIZE (tmode
) < max_size
)
949 if (mode
== VOIDmode
)
952 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
953 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
954 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
956 max_size
= GET_MODE_SIZE (mode
);
959 /* The code above should have handled everything. */
960 gcc_assert (!data
.len
);
966 gcc_assert (!data
.reverse
);
971 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
972 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
974 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
977 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
984 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
992 /* Return number of insns required to move L bytes by pieces.
993 ALIGN (in bits) is maximum alignment we can assume. */
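
/* Worked example (added in this annotated copy, under the assumption that
   SImode, HImode and QImode moves are all available and sufficiently
   aligned): for L = 7 and MOVE_MAX_PIECES = 4, the greedy loop below counts
   one SImode move (4 bytes), one HImode move (2 bytes) and one QImode move
   (1 byte), i.e. 3 insns.  */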
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;
1002 tmode
= mode_for_size (MOVE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
1003 if (align
>= GET_MODE_ALIGNMENT (tmode
))
1004 align
= GET_MODE_ALIGNMENT (tmode
);
1007 enum machine_mode tmode
, xmode
;
1009 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
1011 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
1012 if (GET_MODE_SIZE (tmode
) > MOVE_MAX_PIECES
1013 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
1016 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
1019 while (max_size
> 1)
1021 enum machine_mode mode
= VOIDmode
;
1022 enum insn_code icode
;
1024 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1025 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1026 if (GET_MODE_SIZE (tmode
) < max_size
)
1029 if (mode
== VOIDmode
)
1032 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1033 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1034 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1036 max_size
= GET_MODE_SIZE (mode
);
1043 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1044 with move instructions for mode MODE. GENFUN is the gen_... function
1045 to make a move insn for that mode. DATA has all the other info. */
1048 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1049 struct move_by_pieces
*data
)
1051 unsigned int size
= GET_MODE_SIZE (mode
);
1052 rtx to1
= NULL_RTX
, from1
;
1054 while (data
->len
>= size
)
1057 data
->offset
-= size
;
1061 if (data
->autinc_to
)
1062 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1065 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1068 if (data
->autinc_from
)
1069 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1072 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1074 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1075 emit_insn (gen_add2_insn (data
->to_addr
,
1076 GEN_INT (-(HOST_WIDE_INT
)size
)));
1077 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1078 emit_insn (gen_add2_insn (data
->from_addr
,
1079 GEN_INT (-(HOST_WIDE_INT
)size
)));
1082 emit_insn ((*genfun
) (to1
, from1
));
1085 #ifdef PUSH_ROUNDING
1086 emit_single_push_insn (mode
, from1
, NULL
);
1092 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1093 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1094 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1095 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1097 if (! data
->reverse
)
1098 data
->offset
+= size
;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
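
/* Usage note (added in this annotated copy): a typical call looks like

	emit_block_move (dest_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   where DEST_MEM and SRC_MEM are hypothetical BLKmode MEMs; the
   BLOCK_OP_CALL_PARM and BLOCK_OP_NO_LIBCALL methods handled below restrict
   which of the move strategies may be used.  */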
1117 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1125 case BLOCK_OP_NORMAL
:
1126 may_use_call
= true;
1129 case BLOCK_OP_CALL_PARM
:
1130 may_use_call
= block_move_libcall_safe_for_call_parm ();
1132 /* Make inhibit_defer_pop nonzero around the library call
1133 to force it to pop the arguments right away. */
1137 case BLOCK_OP_NO_LIBCALL
:
1138 may_use_call
= false;
1145 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1147 gcc_assert (MEM_P (x
));
1148 gcc_assert (MEM_P (y
));
1151 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1152 block copy is more efficient for other large modes, e.g. DCmode. */
1153 x
= adjust_address (x
, BLKmode
, 0);
1154 y
= adjust_address (y
, BLKmode
, 0);
1156 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1157 can be incorrect is coming from __builtin_memcpy. */
1158 if (GET_CODE (size
) == CONST_INT
)
1160 if (INTVAL (size
) == 0)
1163 x
= shallow_copy_rtx (x
);
1164 y
= shallow_copy_rtx (y
);
1165 set_mem_size (x
, size
);
1166 set_mem_size (y
, size
);
1169 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1170 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1171 else if (emit_block_move_via_movmem (x
, y
, size
, align
))
1173 else if (may_use_call
)
1174 retval
= emit_block_move_via_libcall (x
, y
, size
);
1176 emit_block_move_via_loop (x
, y
, size
, align
);
1178 if (method
== BLOCK_OP_CALL_PARM
)
1184 /* A subroutine of emit_block_move. Returns true if calling the
1185 block move libcall will not clobber any parameters which may have
1186 already been placed on the stack. */
1189 block_move_libcall_safe_for_call_parm (void)
1191 /* If arguments are pushed on the stack, then they're safe. */
1195 /* If registers go on the stack anyway, any argument is sure to clobber
1196 an outgoing argument. */
1197 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1199 tree fn
= emit_block_move_libcall_fn (false);
1201 if (REG_PARM_STACK_SPACE (fn
) != 0)
1206 /* If any argument goes in memory, then it might clobber an outgoing
1209 CUMULATIVE_ARGS args_so_far
;
1212 fn
= emit_block_move_libcall_fn (false);
1213 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1215 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1216 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1218 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1219 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1220 if (!tmp
|| !REG_P (tmp
))
1222 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1225 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1231 /* A subroutine of emit_block_move. Expand a movmem pattern;
1232 return true if successful. */
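
/* Note (added in this annotated copy): a movmem pattern takes four operands,
   in the order they are passed to GEN_FCN below: the destination MEM, the
   source MEM, the length (converted to the pattern's mode), and the shared
   alignment in bytes.  */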
1235 emit_block_move_via_movmem (rtx x
, rtx y
, rtx size
, unsigned int align
)
1237 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1238 int save_volatile_ok
= volatile_ok
;
1239 enum machine_mode mode
;
1241 /* Since this is a move insn, we don't care about volatility. */
1244 /* Try the most limited insn first, because there's no point
1245 including more than one in the machine description unless
1246 the more limited one has some advantage. */
1248 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1249 mode
= GET_MODE_WIDER_MODE (mode
))
1251 enum insn_code code
= movmem_optab
[(int) mode
];
1252 insn_operand_predicate_fn pred
;
1254 if (code
!= CODE_FOR_nothing
1255 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1256 here because if SIZE is less than the mode mask, as it is
1257 returned by the macro, it will definitely be less than the
1258 actual mode mask. */
1259 && ((GET_CODE (size
) == CONST_INT
1260 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1261 <= (GET_MODE_MASK (mode
) >> 1)))
1262 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1263 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1264 || (*pred
) (x
, BLKmode
))
1265 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1266 || (*pred
) (y
, BLKmode
))
1267 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1268 || (*pred
) (opalign
, VOIDmode
)))
1271 rtx last
= get_last_insn ();
1274 op2
= convert_to_mode (mode
, size
, 1);
1275 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1276 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1277 op2
= copy_to_mode_reg (mode
, op2
);
1279 /* ??? When called via emit_block_move_for_call, it'd be
1280 nice if there were some way to inform the backend, so
1281 that it doesn't fail the expansion because it thinks
1282 emitting the libcall would be more efficient. */
1284 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1288 volatile_ok
= save_volatile_ok
;
1292 delete_insns_since (last
);
1296 volatile_ok
= save_volatile_ok
;
1300 /* A subroutine of emit_block_move. Expand a call to memcpy.
1301 Return the return value from memcpy, 0 otherwise. */
1304 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
)
1306 rtx dst_addr
, src_addr
;
1307 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1308 enum machine_mode size_mode
;
1311 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1312 pseudos. We can then place those new pseudos into a VAR_DECL and
1315 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1316 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1318 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1319 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1321 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1322 src_tree
= make_tree (ptr_type_node
, src_addr
);
1324 size_mode
= TYPE_MODE (sizetype
);
1326 size
= convert_to_mode (size_mode
, size
, 1);
1327 size
= copy_to_mode_reg (size_mode
, size
);
1329 /* It is incorrect to use the libcall calling conventions to call
1330 memcpy in this context. This could be a user call to memcpy and
1331 the user may wish to examine the return value from memcpy. For
1332 targets where libcalls and normal calls have different conventions
1333 for returning pointers, we could end up generating incorrect code. */
1335 size_tree
= make_tree (sizetype
, size
);
1337 fn
= emit_block_move_libcall_fn (true);
1338 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1339 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1340 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1342 /* Now we have to build up the CALL_EXPR itself. */
1343 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1344 call_expr
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1345 call_expr
, arg_list
, NULL_TREE
);
1347 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1352 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1353 for the function we use for block copies. The first time FOR_CALL
1354 is true, we call assemble_external. */
1356 static GTY(()) tree block_move_fn
;
1359 init_block_move_fn (const char *asmspec
)
1365 fn
= get_identifier ("memcpy");
1366 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1367 const_ptr_type_node
, sizetype
,
1370 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
1371 DECL_EXTERNAL (fn
) = 1;
1372 TREE_PUBLIC (fn
) = 1;
1373 DECL_ARTIFICIAL (fn
) = 1;
1374 TREE_NOTHROW (fn
) = 1;
1380 set_user_assembler_name (block_move_fn
, asmspec
);
1384 emit_block_move_libcall_fn (int for_call
)
1386 static bool emitted_extern
;
1389 init_block_move_fn (NULL
);
1391 if (for_call
&& !emitted_extern
)
1393 emitted_extern
= true;
1394 make_decl_rtl (block_move_fn
);
1395 assemble_external (block_move_fn
);
1398 return block_move_fn
;
1401 /* A subroutine of emit_block_move. Copy the data via an explicit
1402 loop. This is used only when libcalls are forbidden. */
1403 /* ??? It'd be nice to copy in hunks larger than QImode. */
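
/* Structure of the loop emitted below (comment added in this annotated
   copy): ITER starts at zero and we jump straight to CMP_LABEL; TOP_LABEL
   copies one QImode byte from Y + ITER to X + ITER and increments ITER;
   CMP_LABEL branches back to TOP_LABEL while ITER < SIZE.  */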
1406 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1407 unsigned int align ATTRIBUTE_UNUSED
)
1409 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1410 enum machine_mode iter_mode
;
1412 iter_mode
= GET_MODE (size
);
1413 if (iter_mode
== VOIDmode
)
1414 iter_mode
= word_mode
;
1416 top_label
= gen_label_rtx ();
1417 cmp_label
= gen_label_rtx ();
1418 iter
= gen_reg_rtx (iter_mode
);
1420 emit_move_insn (iter
, const0_rtx
);
1422 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1423 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1424 do_pending_stack_adjust ();
1426 emit_jump (cmp_label
);
1427 emit_label (top_label
);
1429 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
1430 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
1431 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
1432 x
= change_address (x
, QImode
, x_addr
);
1433 y
= change_address (y
, QImode
, y_addr
);
1435 emit_move_insn (x
, y
);
1437 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1438 true, OPTAB_LIB_WIDEN
);
1440 emit_move_insn (iter
, tmp
);
1442 emit_label (cmp_label
);
1444 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1448 /* Copy all or part of a value X into registers starting at REGNO.
1449 The number of registers to be filled is NREGS. */
1452 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1455 #ifdef HAVE_load_multiple
1463 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1464 x
= validize_mem (force_const_mem (mode
, x
));
1466 /* See if the machine can do this with a load multiple insn. */
1467 #ifdef HAVE_load_multiple
1468 if (HAVE_load_multiple
)
1470 last
= get_last_insn ();
1471 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1479 delete_insns_since (last
);
1483 for (i
= 0; i
< nregs
; i
++)
1484 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1485 operand_subword_force (x
, i
, mode
));
1488 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1489 The number of registers to be filled is NREGS. */
1492 move_block_from_reg (int regno
, rtx x
, int nregs
)
1499 /* See if the machine can do this with a store multiple insn. */
1500 #ifdef HAVE_store_multiple
1501 if (HAVE_store_multiple
)
1503 rtx last
= get_last_insn ();
1504 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1512 delete_insns_since (last
);
1516 for (i
= 0; i
< nregs
; i
++)
1518 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1522 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1526 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1527 ORIG, where ORIG is a non-consecutive group of registers represented by
1528 a PARALLEL. The clone is identical to the original except in that the
1529 original set of registers is replaced by a new set of pseudo registers.
1530 The new set has the same modes as the original set. */
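
/* Note (added in this annotated copy): such a PARALLEL is roughly of the
   form

	(parallel [(expr_list (reg:DI 3) (const_int 0))
		   (expr_list (reg:DI 4) (const_int 8))])

   pairing each register with its byte offset; the register numbers and
   modes shown here are purely illustrative.  */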
1533 gen_group_rtx (rtx orig
)
1538 gcc_assert (GET_CODE (orig
) == PARALLEL
);
1540 length
= XVECLEN (orig
, 0);
1541 tmps
= alloca (sizeof (rtx
) * length
);
1543 /* Skip a NULL entry in first slot. */
1544 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1549 for (; i
< length
; i
++)
1551 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1552 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1554 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1557 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1560 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1561 except that values are placed in TMPS[i], and must later be moved
1562 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1565 emit_group_load_1 (rtx
*tmps
, rtx dst
, rtx orig_src
, tree type
, int ssize
)
1569 enum machine_mode m
= GET_MODE (orig_src
);
1571 gcc_assert (GET_CODE (dst
) == PARALLEL
);
1573 if (!SCALAR_INT_MODE_P (m
)
1574 && !MEM_P (orig_src
) && GET_CODE (orig_src
) != CONCAT
)
1576 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_src
));
1577 if (imode
== BLKmode
)
1578 src
= assign_stack_temp (GET_MODE (orig_src
), ssize
, 0);
1580 src
= gen_reg_rtx (imode
);
1581 if (imode
!= BLKmode
)
1582 src
= gen_lowpart (GET_MODE (orig_src
), src
);
1583 emit_move_insn (src
, orig_src
);
1584 /* ...and back again. */
1585 if (imode
!= BLKmode
)
1586 src
= gen_lowpart (imode
, src
);
1587 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1591 /* Check for a NULL entry, used to indicate that the parameter goes
1592 both on the stack and in registers. */
1593 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1598 /* Process the pieces. */
1599 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1601 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1602 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1603 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1606 /* Handle trailing fragments that run over the size of the struct. */
1607 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1609 /* Arrange to shift the fragment to where it belongs.
1610 extract_bit_field loads to the lsb of the reg. */
1612 #ifdef BLOCK_REG_PADDING
1613 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1614 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1619 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1620 bytelen
= ssize
- bytepos
;
1621 gcc_assert (bytelen
> 0);
1624 /* If we won't be loading directly from memory, protect the real source
1625 from strange tricks we might play; but make sure that the source can
1626 be loaded directly into the destination. */
1628 if (!MEM_P (orig_src
)
1629 && (!CONSTANT_P (orig_src
)
1630 || (GET_MODE (orig_src
) != mode
1631 && GET_MODE (orig_src
) != VOIDmode
)))
1633 if (GET_MODE (orig_src
) == VOIDmode
)
1634 src
= gen_reg_rtx (mode
);
1636 src
= gen_reg_rtx (GET_MODE (orig_src
));
1638 emit_move_insn (src
, orig_src
);
1641 /* Optimize the access just a bit. */
1643 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1644 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1645 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1646 && bytelen
== GET_MODE_SIZE (mode
))
1648 tmps
[i
] = gen_reg_rtx (mode
);
1649 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1651 else if (GET_CODE (src
) == CONCAT
)
1653 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1654 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1656 if ((bytepos
== 0 && bytelen
== slen0
)
1657 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1659 /* The following assumes that the concatenated objects all
1660 have the same size. In this case, a simple calculation
1661 can be used to determine the object and the bit field
1663 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1664 if (! CONSTANT_P (tmps
[i
])
1665 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
1666 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1667 (bytepos
% slen0
) * BITS_PER_UNIT
,
1668 1, NULL_RTX
, mode
, mode
);
1674 gcc_assert (!bytepos
);
1675 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1676 emit_move_insn (mem
, src
);
1677 tmps
[i
] = extract_bit_field (mem
, bytelen
* BITS_PER_UNIT
,
1678 0, 1, NULL_RTX
, mode
, mode
);
1681 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1682 SIMD register, which is currently broken. While we get GCC
1683 to emit proper RTL for these cases, let's dump to memory. */
1684 else if (VECTOR_MODE_P (GET_MODE (dst
))
1687 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1690 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1691 emit_move_insn (mem
, src
);
1692 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1694 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1695 && XVECLEN (dst
, 0) > 1)
1696 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE(dst
), bytepos
);
1697 else if (CONSTANT_P (src
)
1698 || (REG_P (src
) && GET_MODE (src
) == mode
))
1701 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1702 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1706 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
1707 build_int_cst (NULL_TREE
, shift
), tmps
[i
], 0);
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */
1717 emit_group_load (rtx dst
, rtx src
, tree type
, int ssize
)
1722 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1723 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1725 /* Copy the extracted pieces into the proper (probable) hard regs. */
1726 for (i
= 0; i
< XVECLEN (dst
, 0); i
++)
1728 rtx d
= XEXP (XVECEXP (dst
, 0, i
), 0);
1731 emit_move_insn (d
, tmps
[i
]);
1735 /* Similar, but load SRC into new pseudos in a format that looks like
1736 PARALLEL. This can later be fed to emit_group_move to get things
1737 in the right place. */
1740 emit_group_load_into_temps (rtx parallel
, rtx src
, tree type
, int ssize
)
1745 vec
= rtvec_alloc (XVECLEN (parallel
, 0));
1746 emit_group_load_1 (&RTVEC_ELT (vec
, 0), parallel
, src
, type
, ssize
);
1748 /* Convert the vector to look just like the original PARALLEL, except
1749 with the computed values. */
1750 for (i
= 0; i
< XVECLEN (parallel
, 0); i
++)
1752 rtx e
= XVECEXP (parallel
, 0, i
);
1753 rtx d
= XEXP (e
, 0);
1757 d
= force_reg (GET_MODE (d
), RTVEC_ELT (vec
, i
));
1758 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), d
, XEXP (e
, 1));
1760 RTVEC_ELT (vec
, i
) = e
;
1763 return gen_rtx_PARALLEL (GET_MODE (parallel
), vec
);
1766 /* Emit code to move a block SRC to block DST, where SRC and DST are
1767 non-consecutive groups of registers, each represented by a PARALLEL. */
1770 emit_group_move (rtx dst
, rtx src
)
1774 gcc_assert (GET_CODE (src
) == PARALLEL
1775 && GET_CODE (dst
) == PARALLEL
1776 && XVECLEN (src
, 0) == XVECLEN (dst
, 0));
1778 /* Skip first entry if NULL. */
1779 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1780 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1781 XEXP (XVECEXP (src
, 0, i
), 0));
1784 /* Move a group of registers represented by a PARALLEL into pseudos. */
1787 emit_group_move_into_temps (rtx src
)
1789 rtvec vec
= rtvec_alloc (XVECLEN (src
, 0));
1792 for (i
= 0; i
< XVECLEN (src
, 0); i
++)
1794 rtx e
= XVECEXP (src
, 0, i
);
1795 rtx d
= XEXP (e
, 0);
1798 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), copy_to_reg (d
), XEXP (e
, 1));
1799 RTVEC_ELT (vec
, i
) = e
;
1802 return gen_rtx_PARALLEL (GET_MODE (src
), vec
);
1805 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1806 where SRC is non-consecutive registers represented by a PARALLEL.
1807 SSIZE represents the total size of block ORIG_DST, or -1 if not
1811 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1815 enum machine_mode m
= GET_MODE (orig_dst
);
1817 gcc_assert (GET_CODE (src
) == PARALLEL
);
1819 if (!SCALAR_INT_MODE_P (m
)
1820 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
1822 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_dst
));
1823 if (imode
== BLKmode
)
1824 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
, 0);
1826 dst
= gen_reg_rtx (imode
);
1827 emit_group_store (dst
, src
, type
, ssize
);
1828 if (imode
!= BLKmode
)
1829 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
1830 emit_move_insn (orig_dst
, dst
);
1834 /* Check for a NULL entry, used to indicate that the parameter goes
1835 both on the stack and in registers. */
1836 if (XEXP (XVECEXP (src
, 0, 0), 0))
1841 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
1843 /* Copy the (probable) hard regs into pseudos. */
1844 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1846 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1847 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1848 emit_move_insn (tmps
[i
], reg
);
1851 /* If we won't be storing directly into memory, protect the real destination
1852 from strange tricks we might play. */
1854 if (GET_CODE (dst
) == PARALLEL
)
1858 /* We can get a PARALLEL dst if there is a conditional expression in
1859 a return statement. In that case, the dst and src are the same,
1860 so no action is necessary. */
1861 if (rtx_equal_p (dst
, src
))
      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */
1868 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
1869 emit_group_store (temp
, src
, type
, ssize
);
1870 emit_group_load (dst
, temp
, type
, ssize
);
1873 else if (!MEM_P (dst
) && GET_CODE (dst
) != CONCAT
)
1875 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
1876 /* Make life a bit easier for combine. */
1877 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
1880 /* Process the pieces. */
1881 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1883 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
1884 enum machine_mode mode
= GET_MODE (tmps
[i
]);
1885 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1888 /* Handle trailing fragments that run over the size of the struct. */
1889 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1891 /* store_bit_field always takes its value from the lsb.
1892 Move the fragment to the lsb if it's not already there. */
1894 #ifdef BLOCK_REG_PADDING
1895 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
1896 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1902 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1903 tmps
[i
] = expand_shift (RSHIFT_EXPR
, mode
, tmps
[i
],
1904 build_int_cst (NULL_TREE
, shift
),
1907 bytelen
= ssize
- bytepos
;
1910 if (GET_CODE (dst
) == CONCAT
)
1912 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
1913 dest
= XEXP (dst
, 0);
1914 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
1916 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
1917 dest
= XEXP (dst
, 1);
1921 gcc_assert (bytepos
== 0 && XVECLEN (src
, 0));
1922 dest
= assign_stack_temp (GET_MODE (dest
),
1923 GET_MODE_SIZE (GET_MODE (dest
)), 0);
1924 emit_move_insn (adjust_address (dest
, GET_MODE (tmps
[i
]), bytepos
),
1931 /* Optimize the access just a bit. */
1933 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
1934 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
1935 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1936 && bytelen
== GET_MODE_SIZE (mode
))
1937 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
1939 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
1943 /* Copy from the pseudo into the (probable) hard reg. */
1944 if (orig_dst
!= dst
)
1945 emit_move_insn (orig_dst
, dst
);
1948 /* Generate code to copy a BLKmode object of TYPE out of a
1949 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1950 is null, a stack temporary is created. TGTBLK is returned.
1952 The purpose of this routine is to handle functions that return
1953 BLKmode structures in registers. Some machines (the PA for example)
1954 want to return all small structures in registers regardless of the
1955 structure's alignment. */
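
/* Worked example (added in this annotated copy): for a 6-byte structure on
   a hypothetical 32-bit big-endian target that returns the value at the
   least significant end of the register, PADDING_CORRECTION below becomes
   32 - (6 % 4) * 8 = 16 bits of left padding to skip.  */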
1958 copy_blkmode_from_reg (rtx tgtblk
, rtx srcreg
, tree type
)
1960 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
1961 rtx src
= NULL
, dst
= NULL
;
1962 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
1963 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
1967 tgtblk
= assign_temp (build_qualified_type (type
,
1969 | TYPE_QUAL_CONST
)),
1971 preserve_temp_slots (tgtblk
);
1974 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1975 into a new pseudo which is a full word. */
1977 if (GET_MODE (srcreg
) != BLKmode
1978 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
1979 srcreg
= convert_to_mode (word_mode
, srcreg
, TYPE_UNSIGNED (type
));
  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
1990 if (bytes
% UNITS_PER_WORD
!= 0
1991 && (targetm
.calls
.return_in_msb (type
)
1993 : BYTES_BIG_ENDIAN
))
1995 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
2002 for (bitpos
= 0, xbitpos
= padding_correction
;
2003 bitpos
< bytes
* BITS_PER_UNIT
;
2004 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2006 /* We need a new source operand each time xbitpos is on a
2007 word boundary and when xbitpos == padding_correction
2008 (the first time through). */
2009 if (xbitpos
% BITS_PER_WORD
== 0
2010 || xbitpos
== padding_correction
)
2011 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2014 /* We need a new destination operand each time bitpos is on
2016 if (bitpos
% BITS_PER_WORD
== 0)
2017 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
2021 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2022 extract_bit_field (src
, bitsize
,
2023 xbitpos
% BITS_PER_WORD
, 1,
2024 NULL_RTX
, word_mode
, word_mode
));
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */
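/* Usage sketch (illustrative only, added for exposition): a caller that
   wants to expand a constant store would typically pair this predicate
   with store_by_pieces, e.g.

       if (can_store_by_pieces (len, my_read_byte, my_cookie, align))
         store_by_pieces (dest, len, my_read_byte, my_cookie, align, 0);

   where my_read_byte and my_cookie are hypothetical names for the caller's
   constant-generating callback and its opaque data.  */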
2086 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
2087 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2088 void *constfundata
, unsigned int align
)
2090 unsigned HOST_WIDE_INT l
;
2091 unsigned int max_size
;
2092 HOST_WIDE_INT offset
= 0;
2093 enum machine_mode mode
, tmode
;
2094 enum insn_code icode
;
2101 if (! STORE_BY_PIECES_P (len
, align
))
2104 tmode
= mode_for_size (STORE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
2105 if (align
>= GET_MODE_ALIGNMENT (tmode
))
2106 align
= GET_MODE_ALIGNMENT (tmode
);
2109 enum machine_mode xmode
;
2111 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
2113 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
2114 if (GET_MODE_SIZE (tmode
) > STORE_MAX_PIECES
2115 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
2118 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */
2125 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2130 max_size
= STORE_MAX_PIECES
+ 1;
2131 while (max_size
> 1)
2133 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2134 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2135 if (GET_MODE_SIZE (tmode
) < max_size
)
2138 if (mode
== VOIDmode
)
2141 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2142 if (icode
!= CODE_FOR_nothing
2143 && align
>= GET_MODE_ALIGNMENT (mode
))
2145 unsigned int size
= GET_MODE_SIZE (mode
);
2152 cst
= (*constfun
) (constfundata
, offset
, mode
);
2153 if (!LEGITIMATE_CONSTANT_P (cst
))
2163 max_size
= GET_MODE_SIZE (mode
);
2166 /* The code above should have handled everything. */
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
2182 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
2183 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2184 void *constfundata
, unsigned int align
, int endp
)
2186 struct store_by_pieces data
;
2190 gcc_assert (endp
!= 2);
2194 gcc_assert (STORE_BY_PIECES_P (len
, align
));
2195 data
.constfun
= constfun
;
2196 data
.constfundata
= constfundata
;
2199 store_by_pieces_1 (&data
, align
);
2204 gcc_assert (!data
.reverse
);
2209 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2210 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2212 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
2215 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2222 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
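/* Note (added for exposition): clear_by_pieces is the small-size fast path.
   clear_storage below only takes it when SIZE is a CONST_INT approved by
   CLEAR_BY_PIECES_P; otherwise it falls back to the target's clrmem pattern
   or to a memset libcall.  */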
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2264 store_by_pieces_1 (struct store_by_pieces
*data ATTRIBUTE_UNUSED
,
2265 unsigned int align ATTRIBUTE_UNUSED
)
2267 rtx to_addr
= XEXP (data
->to
, 0);
2268 unsigned int max_size
= STORE_MAX_PIECES
+ 1;
2269 enum machine_mode mode
= VOIDmode
, tmode
;
2270 enum insn_code icode
;
2273 data
->to_addr
= to_addr
;
2275 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2276 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2278 data
->explicit_inc_to
= 0;
2280 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2282 data
->offset
= data
->len
;
2284 /* If storing requires more than two move insns,
2285 copy addresses to registers (to make displacements shorter)
2286 and use post-increment if available. */
2287 if (!data
->autinc_to
2288 && move_by_pieces_ninsns (data
->len
, align
, max_size
) > 2)
2290 /* Determine the main mode we'll be using. */
2291 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2292 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2293 if (GET_MODE_SIZE (tmode
) < max_size
)
2296 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2298 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2299 data
->autinc_to
= 1;
2300 data
->explicit_inc_to
= -1;
2303 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2304 && ! data
->autinc_to
)
2306 data
->to_addr
= copy_addr_to_reg (to_addr
);
2307 data
->autinc_to
= 1;
2308 data
->explicit_inc_to
= 1;
2311 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2312 data
->to_addr
= copy_addr_to_reg (to_addr
);
2315 tmode
= mode_for_size (STORE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
2316 if (align
>= GET_MODE_ALIGNMENT (tmode
))
2317 align
= GET_MODE_ALIGNMENT (tmode
);
2320 enum machine_mode xmode
;
2322 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
2324 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
2325 if (GET_MODE_SIZE (tmode
) > STORE_MAX_PIECES
2326 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
2329 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
2332 /* First store what we can in the largest integer mode, then go to
2333 successively smaller modes. */
2335 while (max_size
> 1)
2337 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2338 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2339 if (GET_MODE_SIZE (tmode
) < max_size
)
2342 if (mode
== VOIDmode
)
2345 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2346 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2347 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2349 max_size
= GET_MODE_SIZE (mode
);
2352 /* The code above should have handled everything. */
2353 gcc_assert (!data
->len
);
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
2361 store_by_pieces_2 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
2362 struct store_by_pieces
*data
)
2364 unsigned int size
= GET_MODE_SIZE (mode
);
2367 while (data
->len
>= size
)
2370 data
->offset
-= size
;
2372 if (data
->autinc_to
)
2373 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
2376 to1
= adjust_address (data
->to
, mode
, data
->offset
);
2378 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2379 emit_insn (gen_add2_insn (data
->to_addr
,
2380 GEN_INT (-(HOST_WIDE_INT
) size
)));
2382 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
);
2383 emit_insn ((*genfun
) (to1
, cst
));
2385 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2386 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2388 if (! data
->reverse
)
2389 data
->offset
+= size
;
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */
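/* Overview (added for exposition): the strategy below is, in order, to emit
   a plain move of CONST0_RTX when OBJECT has a non-BLK mode of the same size
   as SIZE, to do nothing for a zero SIZE, to clear small constant-sized
   blocks with clear_by_pieces, to try the target's clrmem pattern, and
   finally to call memset through clear_storage_via_libcall.  */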
2399 clear_storage (rtx object
, rtx size
)
2402 unsigned int align
= (MEM_P (object
) ? MEM_ALIGN (object
)
2403 : GET_MODE_ALIGNMENT (GET_MODE (object
)));
2405 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2406 just move a zero. Otherwise, do this a piece at a time. */
2407 if (GET_MODE (object
) != BLKmode
2408 && GET_CODE (size
) == CONST_INT
2409 && INTVAL (size
) == (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (object
)))
2410 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2413 if (size
== const0_rtx
)
2415 else if (GET_CODE (size
) == CONST_INT
2416 && CLEAR_BY_PIECES_P (INTVAL (size
), align
))
2417 clear_by_pieces (object
, INTVAL (size
), align
);
2418 else if (clear_storage_via_clrmem (object
, size
, align
))
2421 retval
= clear_storage_via_libcall (object
, size
);
/* A subroutine of clear_storage.  Expand a clrmem pattern;
   return true if successful.  */
2431 clear_storage_via_clrmem (rtx object
, rtx size
, unsigned int align
)
2433 /* Try the most limited insn first, because there's no point
2434 including more than one in the machine description unless
2435 the more limited one has some advantage. */
2437 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2438 enum machine_mode mode
;
2440 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2441 mode
= GET_MODE_WIDER_MODE (mode
))
2443 enum insn_code code
= clrmem_optab
[(int) mode
];
2444 insn_operand_predicate_fn pred
;
2446 if (code
!= CODE_FOR_nothing
2447 /* We don't need MODE to be narrower than
2448 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2449 the mode mask, as it is returned by the macro, it will
2450 definitely be less than the actual mode mask. */
2451 && ((GET_CODE (size
) == CONST_INT
2452 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2453 <= (GET_MODE_MASK (mode
) >> 1)))
2454 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2455 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2456 || (*pred
) (object
, BLKmode
))
2457 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2458 || (*pred
) (opalign
, VOIDmode
)))
2461 rtx last
= get_last_insn ();
2464 op1
= convert_to_mode (mode
, size
, 1);
2465 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2466 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2467 op1
= copy_to_mode_reg (mode
, op1
);
2469 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2476 delete_insns_since (last
);
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */
2487 clear_storage_via_libcall (rtx object
, rtx size
)
2489 tree call_expr
, arg_list
, fn
, object_tree
, size_tree
;
2490 enum machine_mode size_mode
;
  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those new pseudos into a VAR_DECL and use them later.  */
2496 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2498 size_mode
= TYPE_MODE (sizetype
);
2499 size
= convert_to_mode (size_mode
, size
, 1);
2500 size
= copy_to_mode_reg (size_mode
, size
);
2502 /* It is incorrect to use the libcall calling conventions to call
2503 memset in this context. This could be a user call to memset and
2504 the user may wish to examine the return value from memset. For
2505 targets where libcalls and normal calls have different conventions
2506 for returning pointers, we could end up generating incorrect code. */
2508 object_tree
= make_tree (ptr_type_node
, object
);
2509 size_tree
= make_tree (sizetype
, size
);
2511 fn
= clear_storage_libcall_fn (true);
2512 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
2513 arg_list
= tree_cons (NULL_TREE
, integer_zero_node
, arg_list
);
2514 arg_list
= tree_cons (NULL_TREE
, object_tree
, arg_list
);
2516 /* Now we have to build up the CALL_EXPR itself. */
2517 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2518 call_expr
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2519 call_expr
, arg_list
, NULL_TREE
);
2521 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */
2530 static GTY(()) tree block_clear_fn
;
2533 init_block_clear_fn (const char *asmspec
)
2535 if (!block_clear_fn
)
2539 fn
= get_identifier ("memset");
2540 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2541 integer_type_node
, sizetype
,
2544 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2545 DECL_EXTERNAL (fn
) = 1;
2546 TREE_PUBLIC (fn
) = 1;
2547 DECL_ARTIFICIAL (fn
) = 1;
2548 TREE_NOTHROW (fn
) = 1;
2550 block_clear_fn
= fn
;
2554 set_user_assembler_name (block_clear_fn
, asmspec
);
static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
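/* Summary note (added for exposition): emit_move_insn first tries to shrink
   floating-point constants via compress_float_constant, spills other
   non-legitimate constants to the constant pool, validizes MEM addresses
   when needed, delegates the actual move to emit_move_insn_1, and finally
   attaches a REG_EQUAL note when a constant was loaded into a register.  */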
2583 emit_move_insn (rtx x
, rtx y
)
2585 enum machine_mode mode
= GET_MODE (x
);
2586 rtx y_cst
= NULL_RTX
;
2589 gcc_assert (mode
!= BLKmode
2590 && (GET_MODE (y
) == mode
|| GET_MODE (y
) == VOIDmode
));
2595 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
2596 && (last_insn
= compress_float_constant (x
, y
)))
2601 if (!LEGITIMATE_CONSTANT_P (y
))
2603 y
= force_const_mem (mode
, y
);
2605 /* If the target's cannot_force_const_mem prevented the spill,
2606 assume that the target's move expanders will also take care
2607 of the non-legitimate constant. */
  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
2616 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2617 && ! push_operand (x
, GET_MODE (x
)))
2619 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2620 x
= validize_mem (x
);
2623 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2625 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2626 y
= validize_mem (y
);
2628 gcc_assert (mode
!= BLKmode
);
2630 last_insn
= emit_move_insn_1 (x
, y
);
2632 if (y_cst
&& REG_P (x
)
2633 && (set
= single_set (last_insn
)) != NULL_RTX
2634 && SET_DEST (set
) == x
2635 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
2636 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */
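/* Summary note (added for exposition): the cases below are tried in order:
   a mov pattern for MODE itself, splitting a complex value into real and
   imaginary parts, handling MODE_CC values through an equally sized integer
   mode, moving through simplify_gen_subreg in the corresponding integer
   mode, and finally a word-by-word copy for multi-word modes that lack a
   move pattern.  */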
2646 emit_move_insn_1 (rtx x
, rtx y
)
2648 enum machine_mode mode
= GET_MODE (x
);
2649 enum machine_mode submode
;
2651 gcc_assert ((unsigned int) mode
< (unsigned int) MAX_MACHINE_MODE
);
2653 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2655 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2657 /* Expand complex moves by moving real part and imag part, if possible. */
2658 else if (COMPLEX_MODE_P (mode
)
2659 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
2660 && (mov_optab
->handlers
[(int) submode
].insn_code
2661 != CODE_FOR_nothing
))
2663 unsigned int modesize
= GET_MODE_SIZE (mode
);
2664 unsigned int submodesize
= GET_MODE_SIZE (submode
);
2666 /* Don't split destination if it is a stack push. */
2667 int stack
= push_operand (x
, mode
);
2669 #ifdef PUSH_ROUNDING
2670 /* In case we output to the stack, but the size is smaller than the
2671 machine can push exactly, we need to use move instructions. */
2672 if (stack
&& PUSH_ROUNDING (submodesize
) != submodesize
)
2675 HOST_WIDE_INT offset1
, offset2
;
2677 /* Do not use anti_adjust_stack, since we don't want to update
2678 stack_pointer_delta. */
2679 temp
= expand_binop (Pmode
,
2680 #ifdef STACK_GROWS_DOWNWARD
2686 GEN_INT (PUSH_ROUNDING (modesize
)),
2687 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2689 if (temp
!= stack_pointer_rtx
)
2690 emit_move_insn (stack_pointer_rtx
, temp
);
2692 #ifdef STACK_GROWS_DOWNWARD
2694 offset2
= submodesize
;
2696 offset1
= -PUSH_ROUNDING (modesize
);
2697 offset2
= -PUSH_ROUNDING (modesize
) + submodesize
;
2700 emit_move_insn (change_address (x
, submode
,
2701 gen_rtx_PLUS (Pmode
,
2703 GEN_INT (offset1
))),
2704 gen_realpart (submode
, y
));
2705 emit_move_insn (change_address (x
, submode
,
2706 gen_rtx_PLUS (Pmode
,
2708 GEN_INT (offset2
))),
2709 gen_imagpart (submode
, y
));
2713 /* If this is a stack, push the highpart first, so it
2714 will be in the argument order.
2716 In that case, change_address is used only to convert
2717 the mode, not to change the address. */
2720 /* Note that the real part always precedes the imag part in memory
2721 regardless of machine's endianness. */
2722 #ifdef STACK_GROWS_DOWNWARD
2723 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2724 gen_imagpart (submode
, y
));
2725 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2726 gen_realpart (submode
, y
));
2728 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2729 gen_realpart (submode
, y
));
2730 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2731 gen_imagpart (submode
, y
));
2736 rtx realpart_x
, realpart_y
;
2737 rtx imagpart_x
, imagpart_y
;
      /* If this is a complex value with each part being smaller than a
         word, the usual calling sequence will likely pack the pieces into
         a single register.  Unfortunately, SUBREG of hard registers only
         deals in terms of words, so we have a problem converting input
         arguments to the CONCAT of two registers that is used elsewhere
         for complex values.  If this is before reload, we can copy it into
         memory and reload.  FIXME, we should see about using extract and
         insert on integer registers, but complex short and complex char
         variables should be rarely used.  */
2748 if ((reload_in_progress
| reload_completed
) == 0
2749 && (!validate_subreg (submode
, mode
, NULL
, submodesize
)
2750 || !validate_subreg (submode
, mode
, NULL
, 0)))
2752 if (REG_P (x
) || REG_P (y
))
2755 enum machine_mode reg_mode
2756 = mode_for_size (GET_MODE_BITSIZE (mode
), MODE_INT
, 1);
2758 gcc_assert (reg_mode
!= BLKmode
);
2760 mem
= assign_stack_temp (reg_mode
, modesize
, 0);
2761 cmem
= adjust_address (mem
, mode
, 0);
2765 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2766 emit_move_insn_1 (cmem
, y
);
2767 return emit_move_insn_1 (sreg
, mem
);
2771 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2772 emit_move_insn_1 (mem
, sreg
);
2773 return emit_move_insn_1 (x
, cmem
);
2778 realpart_x
= gen_realpart (submode
, x
);
2779 realpart_y
= gen_realpart (submode
, y
);
2780 imagpart_x
= gen_imagpart (submode
, x
);
2781 imagpart_y
= gen_imagpart (submode
, y
);
2783 /* Show the output dies here. This is necessary for SUBREGs
2784 of pseudos since we cannot track their lifetimes correctly;
2785 hard regs shouldn't appear here except as return values.
2786 We never want to emit such a clobber after reload. */
2788 && ! (reload_in_progress
|| reload_completed
)
2789 && (GET_CODE (realpart_x
) == SUBREG
2790 || GET_CODE (imagpart_x
) == SUBREG
))
2791 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2793 emit_move_insn (realpart_x
, realpart_y
);
2794 emit_move_insn (imagpart_x
, imagpart_y
);
2797 return get_last_insn ();
  /* Handle MODE_CC modes:  If we don't have a special move insn for this mode,
     find a mode to do it in.  If we have a movcc, use it.  Otherwise,
     find the MODE_INT mode of the same width.  */
2803 else if (GET_MODE_CLASS (mode
) == MODE_CC
2804 && mov_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2806 enum insn_code insn_code
;
2807 enum machine_mode tmode
= VOIDmode
;
2811 && mov_optab
->handlers
[(int) CCmode
].insn_code
!= CODE_FOR_nothing
)
2814 for (tmode
= QImode
; tmode
!= VOIDmode
;
2815 tmode
= GET_MODE_WIDER_MODE (tmode
))
2816 if (GET_MODE_SIZE (tmode
) == GET_MODE_SIZE (mode
))
2819 gcc_assert (tmode
!= VOIDmode
);
      /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
         may call change_address which is not appropriate if we were
         called when a reload was in progress.  We don't have to worry
         about changing the address since the size in bytes is supposed to
         be the same.  Copy the MEM to change the mode and move any
         substitutions from the old MEM to the new one.  */
2828 if (reload_in_progress
)
2830 x
= gen_lowpart_common (tmode
, x1
);
2831 if (x
== 0 && MEM_P (x1
))
2833 x
= adjust_address_nv (x1
, tmode
, 0);
2834 copy_replacements (x1
, x
);
2837 y
= gen_lowpart_common (tmode
, y1
);
2838 if (y
== 0 && MEM_P (y1
))
2840 y
= adjust_address_nv (y1
, tmode
, 0);
2841 copy_replacements (y1
, y
);
2846 x
= gen_lowpart (tmode
, x
);
2847 y
= gen_lowpart (tmode
, y
);
2850 insn_code
= mov_optab
->handlers
[(int) tmode
].insn_code
;
2851 return emit_insn (GEN_FCN (insn_code
) (x
, y
));
  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
2858 else if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
2859 && (submode
= int_mode_for_mode (mode
)) != BLKmode
2860 && mov_optab
->handlers
[submode
].insn_code
!= CODE_FOR_nothing
)
2861 return emit_insn (GEN_FCN (mov_optab
->handlers
[submode
].insn_code
)
2862 (simplify_gen_subreg (submode
, x
, mode
, 0),
2863 simplify_gen_subreg (submode
, y
, mode
, 0)));
  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
2875 gcc_assert (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
);
2877 #ifdef PUSH_ROUNDING
2879 /* If X is a push on the stack, do the push now and replace
2880 X with a reference to the stack pointer. */
2881 if (push_operand (x
, GET_MODE (x
)))
2886 /* Do not use anti_adjust_stack, since we don't want to update
2887 stack_pointer_delta. */
2888 temp
= expand_binop (Pmode
,
2889 #ifdef STACK_GROWS_DOWNWARD
2897 (GET_MODE_SIZE (GET_MODE (x
)))),
2898 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2900 if (temp
!= stack_pointer_rtx
)
2901 emit_move_insn (stack_pointer_rtx
, temp
);
2903 code
= GET_CODE (XEXP (x
, 0));
2905 /* Just hope that small offsets off SP are OK. */
2906 if (code
== POST_INC
)
2907 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
2908 GEN_INT (-((HOST_WIDE_INT
)
2909 GET_MODE_SIZE (GET_MODE (x
)))));
2910 else if (code
== POST_DEC
)
2911 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
2912 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2914 temp
= stack_pointer_rtx
;
2916 x
= change_address (x
, VOIDmode
, temp
);
2920 /* If we are in reload, see if either operand is a MEM whose address
2921 is scheduled for replacement. */
2922 if (reload_in_progress
&& MEM_P (x
)
2923 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
2924 x
= replace_equiv_address_nv (x
, inner
);
2925 if (reload_in_progress
&& MEM_P (y
)
2926 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
2927 y
= replace_equiv_address_nv (y
, inner
);
2933 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2936 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2937 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2939 /* If we can't get a part of Y, put Y into memory if it is a
2940 constant. Otherwise, force it into a register. If we still
2941 can't get a part of Y, abort. */
2942 if (ypart
== 0 && CONSTANT_P (y
))
2944 y
= force_const_mem (mode
, y
);
2945 ypart
= operand_subword (y
, i
, 1, mode
);
2947 else if (ypart
== 0)
2948 ypart
= operand_subword_force (y
, i
, mode
);
2950 gcc_assert (xpart
&& ypart
);
2952 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
2954 last_insn
= emit_move_insn (xpart
, ypart
);
2960 /* Show the output dies here. This is necessary for SUBREGs
2961 of pseudos since we cannot track their lifetimes correctly;
2962 hard regs shouldn't appear here except as return values.
2963 We never want to emit such a clobber after reload. */
2965 && ! (reload_in_progress
|| reload_completed
)
2966 && need_clobber
!= 0)
2967 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */
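/* Illustrative example (added for exposition): a DFmode constant such as 1.5
   is exactly representable in SFmode, so on a target whose extendsfdf2
   pattern accepts a constant or memory operand the move can be emitted as a
   float_extend of the narrower SFmode constant, which is usually cheaper to
   materialize.  */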
2980 compress_float_constant (rtx x
, rtx y
)
2982 enum machine_mode dstmode
= GET_MODE (x
);
2983 enum machine_mode orig_srcmode
= GET_MODE (y
);
2984 enum machine_mode srcmode
;
2987 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
2989 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
2990 srcmode
!= orig_srcmode
;
2991 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
2994 rtx trunc_y
, last_insn
;
2996 /* Skip if the target can't extend this way. */
2997 ic
= can_extend_p (dstmode
, srcmode
, 0);
2998 if (ic
== CODE_FOR_nothing
)
3001 /* Skip if the narrowed value isn't exact. */
3002 if (! exact_real_truncate (srcmode
, &r
))
3005 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3007 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3009 /* Skip if the target needs extra instructions to perform
3011 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
3014 else if (float_extend_from_mem
[dstmode
][srcmode
])
3015 trunc_y
= validize_mem (force_const_mem (srcmode
, trunc_y
));
3019 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3020 last_insn
= get_last_insn ();
3023 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
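/* Illustrative note (added for exposition): a call such as
   push_block (GEN_INT (16), 0, 0) simply reserves 16 bytes of stack space
   and returns an address for the start of that block; the EXTRA/BELOW
   arguments only matter when extra padding must be placed on one side of
   the data.  */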
3042 push_block (rtx size
, int extra
, int below
)
3046 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3047 if (CONSTANT_P (size
))
3048 anti_adjust_stack (plus_constant (size
, extra
));
3049 else if (REG_P (size
) && extra
== 0)
3050 anti_adjust_stack (size
);
3053 temp
= copy_to_mode_reg (Pmode
, size
);
3055 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3056 temp
, 0, OPTAB_LIB_WIDEN
);
3057 anti_adjust_stack (temp
);
3060 #ifndef STACK_GROWS_DOWNWARD
3066 temp
= virtual_outgoing_args_rtx
;
3067 if (extra
!= 0 && below
)
3068 temp
= plus_constant (temp
, extra
);
3072 if (GET_CODE (size
) == CONST_INT
)
3073 temp
= plus_constant (virtual_outgoing_args_rtx
,
3074 -INTVAL (size
) - (below
? 0 : extra
));
3075 else if (extra
!= 0 && !below
)
3076 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3077 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3079 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3080 negate_rtx (Pmode
, size
));
3083 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
3086 #ifdef PUSH_ROUNDING
3088 /* Emit single push insn. */
3091 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
3094 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3096 enum insn_code icode
;
3097 insn_operand_predicate_fn pred
;
3099 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
3102 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3103 if (icode
!= CODE_FOR_nothing
)
3105 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3106 && !((*pred
) (x
, mode
))))
3107 x
= force_reg (mode
, x
);
3108 emit_insn (GEN_FCN (icode
) (x
));
3111 if (GET_MODE_SIZE (mode
) == rounded_size
)
3112 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
3117 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
3119 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
3120 HOST_WIDE_INT offset
;
3122 emit_move_insn (stack_pointer_rtx
,
3123 expand_binop (Pmode
,
3124 #ifdef STACK_GROWS_DOWNWARD
3130 GEN_INT (rounded_size
),
3131 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
3133 offset
= (HOST_WIDE_INT
) padding_size
;
3134 #ifdef STACK_GROWS_DOWNWARD
3135 if (STACK_PUSH_CODE
== POST_DEC
)
3136 /* We have already decremented the stack pointer, so get the
3138 offset
+= (HOST_WIDE_INT
) rounded_size
;
3140 if (STACK_PUSH_CODE
== POST_INC
)
3141 /* We have already incremented the stack pointer, so get the
3143 offset
-= (HOST_WIDE_INT
) rounded_size
;
3145 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (offset
));
3149 #ifdef STACK_GROWS_DOWNWARD
3150 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3151 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3152 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3154 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3155 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3156 GEN_INT (rounded_size
));
3158 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3161 dest
= gen_rtx_MEM (mode
, dest_addr
);
3165 set_mem_attributes (dest
, type
, 1);
3167 if (flag_optimize_sibling_calls
)
3168 /* Function incoming arguments may overlap with sibling call
3169 outgoing arguments and we cannot allow reordering of reads
3170 from function arguments with stores to outgoing arguments
3171 of sibling calls. */
3172 set_mem_alias_set (dest
, 0);
3174 emit_move_insn (dest
, x
);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
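/* Overview (added for exposition): the body below handles three cases.  A
   BLKmode argument is either pushed with move_by_pieces when push insns can
   do it cheaply or copied with emit_block_move into stack space obtained
   from push_block.  A scalar that lives partly in registers is pushed word
   by word with recursive emit_push_insn calls.  Any other scalar is pushed
   with emit_single_push_insn or stored at the precomputed argument-block
   address.  Finally the register part, if any, is loaded at the end, since
   the memory-to-memory copies above may make function calls.  */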
3211 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3212 unsigned int align
, int partial
, rtx reg
, int extra
,
3213 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3217 enum direction stack_direction
3218 #ifdef STACK_GROWS_DOWNWARD
3224 /* Decide where to pad the argument: `downward' for below,
3225 `upward' for above, or `none' for don't pad it.
3226 Default is below for small data on big-endian machines; else above. */
3227 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3229 /* Invert direction if stack is post-decrement.
3231 if (STACK_PUSH_CODE
== POST_DEC
)
3232 if (where_pad
!= none
)
3233 where_pad
= (where_pad
== downward
? upward
: downward
);
3237 if (mode
== BLKmode
)
3239 /* Copy a block into the stack, entirely or partially. */
3242 int used
= partial
* UNITS_PER_WORD
;
3246 if (reg
&& GET_CODE (reg
) == PARALLEL
)
3248 /* Use the size of the elt to compute offset. */
3249 rtx elt
= XEXP (XVECEXP (reg
, 0, 0), 0);
3250 used
= partial
* GET_MODE_SIZE (GET_MODE (elt
));
3251 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3254 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3260 /* USED is now the # of bytes we need not copy to the stack
3261 because registers will take care of them. */
3264 xinner
= adjust_address (xinner
, BLKmode
, used
);
3266 /* If the partial register-part of the arg counts in its stack size,
3267 skip the part of stack space corresponding to the registers.
3268 Otherwise, start copying to the beginning of the stack space,
3269 by setting SKIP to 0. */
3270 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3272 #ifdef PUSH_ROUNDING
3273 /* Do it with several push insns if that doesn't take lots of insns
3274 and if there is no difficulty with push insns that skip bytes
3275 on the stack for alignment purposes. */
3278 && GET_CODE (size
) == CONST_INT
3280 && MEM_ALIGN (xinner
) >= align
3281 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3282 /* Here we avoid the case of a structure whose weak alignment
3283 forces many pushes of a small amount of data,
3284 and such small pushes do rounding that causes trouble. */
3285 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3286 || align
>= BIGGEST_ALIGNMENT
3287 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3288 == (align
/ BITS_PER_UNIT
)))
3289 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3291 /* Push padding now if padding above and stack grows down,
3292 or if padding below and stack grows up.
3293 But if space already allocated, this has already been done. */
3294 if (extra
&& args_addr
== 0
3295 && where_pad
!= none
&& where_pad
!= stack_direction
)
3296 anti_adjust_stack (GEN_INT (extra
));
3298 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3301 #endif /* PUSH_ROUNDING */
3305 /* Otherwise make space on the stack and copy the data
3306 to the address of that space. */
3308 /* Deduct words put into registers from the size we must copy. */
3311 if (GET_CODE (size
) == CONST_INT
)
3312 size
= GEN_INT (INTVAL (size
) - used
);
3314 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3315 GEN_INT (used
), NULL_RTX
, 0,
3319 /* Get the address of the stack space.
3320 In this case, we do not deal with EXTRA separately.
3321 A single stack adjust will do. */
3324 temp
= push_block (size
, extra
, where_pad
== downward
);
3327 else if (GET_CODE (args_so_far
) == CONST_INT
)
3328 temp
= memory_address (BLKmode
,
3329 plus_constant (args_addr
,
3330 skip
+ INTVAL (args_so_far
)));
3332 temp
= memory_address (BLKmode
,
3333 plus_constant (gen_rtx_PLUS (Pmode
,
3338 if (!ACCUMULATE_OUTGOING_ARGS
)
3340 /* If the source is referenced relative to the stack pointer,
3341 copy it to another register to stabilize it. We do not need
3342 to do this if we know that we won't be changing sp. */
3344 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3345 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3346 temp
= copy_to_reg (temp
);
3349 target
= gen_rtx_MEM (BLKmode
, temp
);
3351 /* We do *not* set_mem_attributes here, because incoming arguments
3352 may overlap with sibling call outgoing arguments and we cannot
3353 allow reordering of reads from function arguments with stores
3354 to outgoing arguments of sibling calls. We do, however, want
3355 to record the alignment of the stack slot. */
3356 /* ALIGN may well be better aligned than TYPE, e.g. due to
3357 PARM_BOUNDARY. Assume the caller isn't lying. */
3358 set_mem_align (target
, align
);
3360 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3363 else if (partial
> 0)
3365 /* Scalar partly in registers. */
3367 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3370 /* # words of start of argument
3371 that we must make space for but need not store. */
3372 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3373 int args_offset
= INTVAL (args_so_far
);
3376 /* Push padding now if padding above and stack grows down,
3377 or if padding below and stack grows up.
3378 But if space already allocated, this has already been done. */
3379 if (extra
&& args_addr
== 0
3380 && where_pad
!= none
&& where_pad
!= stack_direction
)
3381 anti_adjust_stack (GEN_INT (extra
));
3383 /* If we make space by pushing it, we might as well push
3384 the real data. Otherwise, we can leave OFFSET nonzero
3385 and leave the space uninitialized. */
3389 /* Now NOT_STACK gets the number of words that we don't need to
3390 allocate on the stack. */
3391 not_stack
= partial
- offset
;
3393 /* If the partial register-part of the arg counts in its stack size,
3394 skip the part of stack space corresponding to the registers.
3395 Otherwise, start copying to the beginning of the stack space,
3396 by setting SKIP to 0. */
3397 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3399 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3400 x
= validize_mem (force_const_mem (mode
, x
));
3402 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3403 SUBREGs of such registers are not allowed. */
3404 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
3405 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3406 x
= copy_to_reg (x
);
      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size that is a multiple of a word.  */
3411 #ifndef PUSH_ARGS_REVERSED
3412 for (i
= not_stack
; i
< size
; i
++)
3414 for (i
= size
- 1; i
>= not_stack
; i
--)
3416 if (i
>= not_stack
+ offset
)
3417 emit_push_insn (operand_subword_force (x
, i
, mode
),
3418 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3420 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3422 reg_parm_stack_space
, alignment_pad
);
3429 /* Push padding now if padding above and stack grows down,
3430 or if padding below and stack grows up.
3431 But if space already allocated, this has already been done. */
3432 if (extra
&& args_addr
== 0
3433 && where_pad
!= none
&& where_pad
!= stack_direction
)
3434 anti_adjust_stack (GEN_INT (extra
));
3436 #ifdef PUSH_ROUNDING
3437 if (args_addr
== 0 && PUSH_ARGS
)
3438 emit_single_push_insn (mode
, x
, type
);
3442 if (GET_CODE (args_so_far
) == CONST_INT
)
3444 = memory_address (mode
,
3445 plus_constant (args_addr
,
3446 INTVAL (args_so_far
)));
3448 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3450 dest
= gen_rtx_MEM (mode
, addr
);
3452 /* We do *not* set_mem_attributes here, because incoming arguments
3453 may overlap with sibling call outgoing arguments and we cannot
3454 allow reordering of reads from function arguments with stores
3455 to outgoing arguments of sibling calls. We do, however, want
3456 to record the alignment of the stack slot. */
3457 /* ALIGN may well be better aligned than TYPE, e.g. due to
3458 PARM_BOUNDARY. Assume the caller isn't lying. */
3459 set_mem_align (dest
, align
);
3461 emit_move_insn (dest
, x
);
3465 /* If part should go in registers, copy that part
3466 into the appropriate registers. Do this now, at the end,
3467 since mem-to-mem copies above may do function calls. */
3468 if (partial
> 0 && reg
!= 0)
3470 /* Handle calls that pass values in multiple non-contiguous locations.
3471 The Irix 6 ABI has examples of this. */
3472 if (GET_CODE (reg
) == PARALLEL
)
3473 emit_group_load (reg
, x
, type
, -1);
3475 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3478 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3479 anti_adjust_stack (GEN_INT (extra
));
3481 if (alignment_pad
&& args_addr
== 0)
3482 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */
3489 get_subtarget (rtx x
)
3493 /* Only registers can be subtargets. */
3495 /* Don't use hard regs to avoid extending their life. */
3496 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3500 /* Expand an assignment that stores the value of FROM into TO. */
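/* Overview (added for exposition): expand_assignment special-cases
   component, array and bit-field references on the left-hand side (via
   get_inner_reference and store_field), calls whose value is returned in
   registers (so the call happens before the lhs address is computed),
   stores into a RESULT_DECL, and returns through a pointer that may overlap
   the source (copied with memmove); everything else falls through to
   store_expr.  */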
3503 expand_assignment (tree to
, tree from
)
3508 /* Don't crash if the lhs of the assignment was erroneous. */
3510 if (TREE_CODE (to
) == ERROR_MARK
)
3512 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
3522 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3523 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
3524 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
3526 enum machine_mode mode1
;
3527 HOST_WIDE_INT bitsize
, bitpos
;
3535 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3536 &unsignedp
, &volatilep
);
3538 /* If we are going to use store_bit_field and extract_bit_field,
3539 make sure to_rtx will be safe for multiple use. */
3541 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3545 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3547 gcc_assert (MEM_P (to_rtx
));
3549 #ifdef POINTERS_EXTEND_UNSIGNED
3550 if (GET_MODE (offset_rtx
) != Pmode
)
3551 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
3553 if (GET_MODE (offset_rtx
) != ptr_mode
)
3554 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3557 /* A constant address in TO_RTX can have VOIDmode, we must not try
3558 to call force_reg for that case. Avoid that case. */
3560 && GET_MODE (to_rtx
) == BLKmode
3561 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3563 && (bitpos
% bitsize
) == 0
3564 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3565 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3567 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3571 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3572 highest_pow2_factor_for_target (to
,
3578 /* If the field is at offset zero, we could have been given the
3579 DECL_RTX of the parent struct. Don't munge it. */
3580 to_rtx
= shallow_copy_rtx (to_rtx
);
3582 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
3585 /* Deal with volatile and readonly fields. The former is only done
3586 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3587 if (volatilep
&& MEM_P (to_rtx
))
3589 if (to_rtx
== orig_to_rtx
)
3590 to_rtx
= copy_rtx (to_rtx
);
3591 MEM_VOLATILE_P (to_rtx
) = 1;
3594 if (MEM_P (to_rtx
) && ! can_address_p (to
))
3596 if (to_rtx
== orig_to_rtx
)
3597 to_rtx
= copy_rtx (to_rtx
);
3598 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3601 /* Optimize bitfld op= val in certain cases. */
3602 while (mode1
== VOIDmode
3603 && bitsize
> 0 && bitsize
< BITS_PER_WORD
3604 && GET_MODE_BITSIZE (GET_MODE (to_rtx
)) <= BITS_PER_WORD
3605 && !TREE_SIDE_EFFECTS (to
)
3606 && !TREE_THIS_VOLATILE (to
))
3609 rtx value
, str_rtx
= to_rtx
;
3610 HOST_WIDE_INT bitpos1
= bitpos
;
3615 if (TREE_CODE (TREE_TYPE (src
)) != INTEGER_TYPE
3616 || !BINARY_CLASS_P (src
))
3619 op0
= TREE_OPERAND (src
, 0);
3620 op1
= TREE_OPERAND (src
, 1);
3623 if (! operand_equal_p (to
, op0
, 0))
3626 if (MEM_P (str_rtx
))
3628 enum machine_mode mode
= GET_MODE (str_rtx
);
3629 HOST_WIDE_INT offset1
;
3631 if (GET_MODE_BITSIZE (mode
) == 0
3632 || GET_MODE_BITSIZE (mode
) > BITS_PER_WORD
)
3634 mode
= get_best_mode (bitsize
, bitpos1
, MEM_ALIGN (str_rtx
),
3636 if (mode
== VOIDmode
)
3640 bitpos1
%= GET_MODE_BITSIZE (mode
);
3641 offset1
= (offset1
- bitpos1
) / BITS_PER_UNIT
;
3642 str_rtx
= adjust_address (str_rtx
, mode
, offset1
);
3644 else if (!REG_P (str_rtx
) && GET_CODE (str_rtx
) != SUBREG
)
3647 /* If the bit field covers the whole REG/MEM, store_field
3648 will likely generate better code. */
3649 if (bitsize
>= GET_MODE_BITSIZE (GET_MODE (str_rtx
)))
3652 /* We can't handle fields split across multiple entities. */
3653 if (bitpos1
+ bitsize
> GET_MODE_BITSIZE (GET_MODE (str_rtx
)))
3656 if (BYTES_BIG_ENDIAN
)
3657 bitpos1
= GET_MODE_BITSIZE (GET_MODE (str_rtx
)) - bitpos1
3660 /* Special case some bitfield op= exp. */
3661 switch (TREE_CODE (src
))
3665 /* For now, just optimize the case of the topmost bitfield
3666 where we don't need to do any masking and also
3667 1 bit bitfields where xor can be used.
3668 We might win by one instruction for the other bitfields
3669 too if insv/extv instructions aren't used, so that
3670 can be added later. */
3671 if (bitpos1
+ bitsize
!= GET_MODE_BITSIZE (GET_MODE (str_rtx
))
3672 && (bitsize
!= 1 || TREE_CODE (op1
) != INTEGER_CST
))
3674 value
= expand_expr (op1
, NULL_RTX
, GET_MODE (str_rtx
), 0);
3675 value
= convert_modes (GET_MODE (str_rtx
),
3676 TYPE_MODE (TREE_TYPE (op1
)), value
,
3677 TYPE_UNSIGNED (TREE_TYPE (op1
)));
3679 /* We may be accessing data outside the field, which means
3680 we can alias adjacent data. */
3681 if (MEM_P (str_rtx
))
3683 str_rtx
= shallow_copy_rtx (str_rtx
);
3684 set_mem_alias_set (str_rtx
, 0);
3685 set_mem_expr (str_rtx
, 0);
3688 binop
= TREE_CODE (src
) == PLUS_EXPR
? add_optab
: sub_optab
;
3690 && bitpos1
+ bitsize
!= GET_MODE_BITSIZE (GET_MODE (str_rtx
)))
3692 value
= expand_and (GET_MODE (str_rtx
), value
, const1_rtx
,
3696 value
= expand_shift (LSHIFT_EXPR
, GET_MODE (str_rtx
), value
,
3697 build_int_cst (NULL_TREE
, bitpos1
),
3699 result
= expand_binop (GET_MODE (str_rtx
), binop
, str_rtx
,
3700 value
, str_rtx
, 1, OPTAB_WIDEN
);
3701 if (result
!= str_rtx
)
3702 emit_move_insn (str_rtx
, result
);
3714 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3715 TREE_TYPE (tem
), get_alias_set (to
));
3717 preserve_temp_slots (result
);
3721 /* If the value is meaningful, convert RESULT to the proper mode.
3722 Otherwise, return nothing. */
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
3736 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
3737 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3738 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3739 && REG_P (DECL_RTL (to
))))
3744 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3746 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3748 /* Handle calls that return values in multiple non-contiguous locations.
3749 The Irix 6 ABI has examples of this. */
3750 if (GET_CODE (to_rtx
) == PARALLEL
)
3751 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
3752 int_size_in_bytes (TREE_TYPE (from
)));
3753 else if (GET_MODE (to_rtx
) == BLKmode
)
3754 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
3757 if (POINTER_TYPE_P (TREE_TYPE (to
)))
3758 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3759 emit_move_insn (to_rtx
, value
);
3761 preserve_temp_slots (to_rtx
);
3767 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3768 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3771 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3773 /* Don't move directly into a return register. */
3774 if (TREE_CODE (to
) == RESULT_DECL
3775 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
3780 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3782 if (GET_CODE (to_rtx
) == PARALLEL
)
3783 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
3784 int_size_in_bytes (TREE_TYPE (from
)));
3786 emit_move_insn (to_rtx
, temp
);
3788 preserve_temp_slots (to_rtx
);
3794 /* In case we are returning the contents of an object which overlaps
3795 the place the value is being stored, use a safe function when copying
3796 a value through a pointer into a structure value return block. */
3797 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3798 && current_function_returns_struct
3799 && !current_function_returns_pcc_struct
)
3804 size
= expr_size (from
);
3805 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3807 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3808 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3809 XEXP (from_rtx
, 0), Pmode
,
3810 convert_to_mode (TYPE_MODE (sizetype
),
3811 size
, TYPE_UNSIGNED (sizetype
)),
3812 TYPE_MODE (sizetype
));
3814 preserve_temp_slots (to_rtx
);
3820 /* Compute FROM and store the value in the rtx we got. */
3823 result
= store_expr (from
, to_rtx
, 0);
3824 preserve_temp_slots (result
);
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */
3844 store_expr (tree exp
, rtx target
, int call_param_p
)
3847 rtx alt_rtl
= NULL_RTX
;
3848 int dont_return_target
= 0;
3850 if (VOID_TYPE_P (TREE_TYPE (exp
)))
3852 /* C++ can generate ?: expressions with a throw expression in one
3853 branch and an rvalue in the other. Here, we resolve attempts to
3854 store the throw expression's nonexistent result. */
3855 gcc_assert (!call_param_p
);
3856 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
3859 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
3861 /* Perform first part of compound expression, then assign from second
3863 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
3864 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
3865 return store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
);
3867 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3869 /* For conditional expression, get safe form of the target. Then
3870 test the condition, doing the appropriate assignment on either
3871 side. This avoids the creation of unnecessary temporaries.
3872 For non-BLKmode, it is more efficient not to do this. */
3874 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3876 do_pending_stack_adjust ();
3878 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3879 store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
);
3880 emit_jump_insn (gen_jump (lab2
));
3883 store_expr (TREE_OPERAND (exp
, 2), target
, call_param_p
);
3889 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
3895 rtx inner_target
= 0;
3897 /* We can do the conversion inside EXP, which will often result
3898 in some optimizations. Do the conversion in two steps: first
3899 change the signedness, if needed, then the extend. But don't
3900 do this if the type of EXP is a subtype of something else
3901 since then the conversion might involve more than just
3902 converting modes. */
3903 if (INTEGRAL_TYPE_P (TREE_TYPE (exp
))
3904 && TREE_TYPE (TREE_TYPE (exp
)) == 0
3905 && (!lang_hooks
.reduce_bit_field_operations
3906 || (GET_MODE_PRECISION (GET_MODE (target
))
3907 == TYPE_PRECISION (TREE_TYPE (exp
)))))
3909 if (TYPE_UNSIGNED (TREE_TYPE (exp
))
3910 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3912 (lang_hooks
.types
.signed_or_unsigned_type
3913 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
3915 exp
= convert (lang_hooks
.types
.type_for_mode
3916 (GET_MODE (SUBREG_REG (target
)),
3917 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3920 inner_target
= SUBREG_REG (target
);
3923 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
3924 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
3926 /* If TEMP is a VOIDmode constant, use convert_modes to make
3927 sure that we properly convert it. */
3928 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3930 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3931 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
3932 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3933 GET_MODE (target
), temp
,
3934 SUBREG_PROMOTED_UNSIGNED_P (target
));
3937 convert_move (SUBREG_REG (target
), temp
,
3938 SUBREG_PROMOTED_UNSIGNED_P (target
));
3944 temp
= expand_expr_real (exp
, target
, GET_MODE (target
),
3946 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
3948 /* Return TARGET if it's a specified hardware register.
3949 If TARGET is a volatile mem ref, either return TARGET
3950 or return a reg copied *from* TARGET; ANSI requires this.
3952 Otherwise, if TEMP is not TARGET, return TEMP
3953 if it is constant (for efficiency),
3954 or if we really want the correct value. */
3955 if (!(target
&& REG_P (target
)
3956 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3957 && !(MEM_P (target
) && MEM_VOLATILE_P (target
))
3958 && ! rtx_equal_p (temp
, target
)
3959 && CONSTANT_P (temp
))
3960 dont_return_target
= 1;
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
3967 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3968 && TREE_CODE (exp
) != ERROR_MARK
3969 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3970 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3971 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */
3991 if ((! rtx_equal_p (temp
, target
)
3992 || (temp
!= target
&& (side_effects_p (temp
)
3993 || side_effects_p (target
))))
3994 && TREE_CODE (exp
) != ERROR_MARK
3995 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3996 but TARGET is not valid memory reference, TEMP will differ
3997 from TARGET although it is really the same location. */
3998 && !(alt_rtl
&& rtx_equal_p (alt_rtl
, target
))
3999 /* If there's nothing to copy, don't bother. Don't call expr_size
4000 unless necessary, because some front-ends (C++) expr_size-hook
4001 aborts on objects that are not supposed to be bit-copied or
4003 && expr_size (exp
) != const0_rtx
)
4005 if (GET_MODE (temp
) != GET_MODE (target
)
4006 && GET_MODE (temp
) != VOIDmode
)
4008 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
4009 if (dont_return_target
)
4011 /* In this case, we will return TEMP,
4012 so make sure it has the proper mode.
4013 But don't forget to store the value into TARGET. */
4014 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4015 emit_move_insn (target
, temp
);
4018 convert_move (target
, temp
, unsignedp
);
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (call_param_p
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
                                               TYPE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (call_param_p
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */
              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
                  if (GET_MODE (copy_size_rtx) != Pmode)
                    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
                                                     TYPE_UNSIGNED (sizetype));
#endif

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, TREE_TYPE (exp),
                         int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (call_param_p
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
        {
          temp = force_operand (temp, target);
          emit_move_insn (target, temp);
        }
    }

  return NULL_RTX;
}
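
/* Illustrative sketch (not part of the original expr.c): the STRING_CST
   branch of store_expr above emits, in effect, a bounded copy followed by a
   clear of the remainder of the array.  The helper below shows the same
   semantics in plain C; the function name and its use of the C library are
   assumptions made only for this example, and the block is kept out of the
   build.  */
#if 0
#include <string.h>

static void
example_store_string_into_array (char *dest, size_t dest_size,
                                 const char *str, size_t str_len)
{
  /* Copy no more than the array can hold (emit_block_move above).  */
  size_t copy = str_len < dest_size ? str_len : dest_size;
  memcpy (dest, str, copy);

  /* Clear whatever is left, as clear_storage does in the RTL case.  */
  if (copy < dest_size)
    memset (dest + copy, 0, dest_size - copy);
}
#endif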
/* Examine CTOR.  Discover how many scalar fields are set to nonzero
   values and place it in *P_NZ_ELTS.  Discover how many scalar fields
   are set to non-constant values and place it in *P_NC_ELTS.  */

static void
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
                            HOST_WIDE_INT *p_nc_elts)
{
  HOST_WIDE_INT nz_elts, nc_elts;
  tree list;

  nz_elts = 0;
  nc_elts = 0;

  for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
    {
      tree value = TREE_VALUE (list);
      tree purpose = TREE_PURPOSE (list);
      HOST_WIDE_INT mult = 1;

      if (TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lo_index = TREE_OPERAND (purpose, 0);
          tree hi_index = TREE_OPERAND (purpose, 1);

          if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
            mult = (tree_low_cst (hi_index, 1)
                    - tree_low_cst (lo_index, 1) + 1);
        }

      switch (TREE_CODE (value))
        {
        case CONSTRUCTOR:
          {
            HOST_WIDE_INT nz = 0, nc = 0;
            categorize_ctor_elements_1 (value, &nz, &nc);
            nz_elts += mult * nz;
            nc_elts += mult * nc;
          }
          break;

        case INTEGER_CST:
        case REAL_CST:
          if (!initializer_zerop (value))
            nz_elts += mult;
          break;

        case COMPLEX_CST:
          if (!initializer_zerop (TREE_REALPART (value)))
            nz_elts += mult;
          if (!initializer_zerop (TREE_IMAGPART (value)))
            nz_elts += mult;
          break;

        case VECTOR_CST:
          {
            tree v;
            for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
              if (!initializer_zerop (TREE_VALUE (v)))
                nz_elts += mult;
          }
          break;

        default:
          nz_elts += mult;
          if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
            nc_elts += mult;
          break;
        }
    }

  *p_nz_elts += nz_elts;
  *p_nc_elts += nc_elts;
}

void
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
                          HOST_WIDE_INT *p_nc_elts)
{
  *p_nz_elts = 0;
  *p_nc_elts = 0;
  categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
}
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  */

static HOST_WIDE_INT
count_type_elements (tree type)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
        tree telts = array_type_nelts (type);
        if (telts && host_integerp (telts, 1))
          {
            HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
            HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
            if (n == 0)
              return 0;
            else if (max / n > m)
              return n * m;
          }
        return -1;
      }

    case RECORD_TYPE:
      {
        HOST_WIDE_INT n = 0, t;
        tree f;

        for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              t = count_type_elements (TREE_TYPE (f));
              if (t < 0)
                return -1;
              n += t;
            }

        return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        /* Ho hum.  How in the world do we guess here?  Clearly it isn't
           right to count the fields.  Guess based on the number of words.  */
        HOST_WIDE_INT n = int_size_in_bytes (type);
        if (n < 0)
          return -1;
        return n / UNITS_PER_WORD;
      }

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, elts;

      /* If there are no ranges of true bits, it is all zero.  */
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts);
      elts = count_type_elements (TREE_TYPE (exp));

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}
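
/* Illustrative sketch (not part of the original expr.c): the heuristic above
   asks whether fewer than one quarter of the scalar elements are nonzero.
   For a plain C array the same test looks like this; the helper name is an
   assumption made only for this example, and the block is kept out of the
   build.  */
#if 0
static int
example_mostly_zeros_p (const int *vals, int n)
{
  int nonzero = 0, i;

  for (i = 0; i < n; i++)
    if (vals[i] != 0)
      nonzero++;

  /* "Mostly zero" means at least 3/4 of the elements are zero, i.e. fewer
     than n/4 are nonzero.  */
  return nonzero < n / 4;
}
#endif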
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
                         tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
         bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree elt;

        /* If size is zero or the target is already cleared, do nothing.  */
        if (size == 0 || cleared)
          cleared = 1;
        /* We either clear the aggregate or indicate the value is dead.  */
        else if ((TREE_CODE (type) == UNION_TYPE
                  || TREE_CODE (type) == QUAL_UNION_TYPE)
                 && ! CONSTRUCTOR_ELTS (exp))
          /* If the constructor is empty, clear the union.  */
          {
            clear_storage (target, expr_size (exp));
            cleared = 1;
          }

        /* If we are building a static constructor into a register,
           set the initial value as zero so we can fold the value into
           a constant.  But if more than one register is involved,
           this probably loses.  */
        else if (REG_P (target) && TREE_STATIC (exp)
                 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
          {
            emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            cleared = 1;
          }

        /* If the constructor has fewer fields than the structure or
           if we are initializing the structure to mostly zeros, clear
           the whole structure first.  Don't do this if TARGET is a
           register whose mode size isn't equal to SIZE since
           clear_storage can't handle this case.  */
        else if (size > 0
                 && ((list_length (CONSTRUCTOR_ELTS (exp))
                      != fields_length (type))
                     || mostly_zeros_p (exp))
                 && (!REG_P (target)
                     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
                         == size)))
          {
            clear_storage (target, GEN_INT (size));
            cleared = 1;
          }

        if (! cleared)
          emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

        /* Store each element of the constructor into the
           corresponding field of TARGET.  */
        for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
          {
            tree field = TREE_PURPOSE (elt);
            tree value = TREE_VALUE (elt);
            enum machine_mode mode;
            HOST_WIDE_INT bitsize;
            HOST_WIDE_INT bitpos = 0;
            tree offset;
            rtx to_rtx = target;

            /* Just ignore missing fields.  We cleared the whole
               structure, above, if any fields are missing.  */
            if (field == 0)
              continue;

            if (cleared && initializer_zerop (value))
              continue;

            if (host_integerp (DECL_SIZE (field), 1))
              bitsize = tree_low_cst (DECL_SIZE (field), 1);
            else
              bitsize = -1;

            mode = DECL_MODE (field);
            if (DECL_BIT_FIELD (field))
              mode = VOIDmode;

            offset = DECL_FIELD_OFFSET (field);
            if (host_integerp (offset, 0)
                && host_integerp (bit_position (field), 0))
              {
                bitpos = int_bit_position (field);
                offset = 0;
              }
            else
              bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

            if (offset)
              {
                rtx offset_rtx;

                offset
                  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
                                                    make_tree (TREE_TYPE (exp),
                                                               target));
                offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
                gcc_assert (MEM_P (to_rtx));

#ifdef POINTERS_EXTEND_UNSIGNED
                if (GET_MODE (offset_rtx) != Pmode)
                  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
                if (GET_MODE (offset_rtx) != ptr_mode)
                  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

                to_rtx = offset_address (to_rtx, offset_rtx,
                                         highest_pow2_factor (offset));
              }

#ifdef WORD_REGISTER_OPERATIONS
            /* If this initializes a field that is smaller than a
               word, at the start of a word, try to widen it to a full
               word.  This special case allows us to output C++ member
               function initializations in a form that the optimizers
               can understand.  */
            if (REG_P (target)
                && bitsize < BITS_PER_WORD
                && bitpos % BITS_PER_WORD == 0
                && GET_MODE_CLASS (mode) == MODE_INT
                && TREE_CODE (value) == INTEGER_CST
                && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
              {
                tree type = TREE_TYPE (value);

                if (TYPE_PRECISION (type) < BITS_PER_WORD)
                  {
                    type = lang_hooks.types.type_for_size
                      (BITS_PER_WORD, TYPE_UNSIGNED (type));
                    value = convert (type, value);
                  }

                if (BYTES_BIG_ENDIAN)
                  value
                    = fold (build2 (LSHIFT_EXPR, type, value,
                                    build_int_cst (NULL_TREE,
                                                   BITS_PER_WORD - bitsize)));
                bitsize = BITS_PER_WORD;
              }
#endif

            if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
                && DECL_NONADDRESSABLE_P (field))
              {
                to_rtx = copy_rtx (to_rtx);
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
              }

            store_constructor_field (to_rtx, bitsize, bitpos, mode,
                                     value, type, cleared,
                                     get_alias_set (TREE_TYPE (field)));
          }
        break;
      }
    case ARRAY_TYPE:
      {
        tree elt;
        tree domain;
        tree elttype = TREE_TYPE (type);
        int const_bounds_p;
        HOST_WIDE_INT minelt = 0;
        HOST_WIDE_INT maxelt = 0;
        int need_to_clear;
        int i;

        domain = TYPE_DOMAIN (type);
        const_bounds_p = (TYPE_MIN_VALUE (domain)
                          && TYPE_MAX_VALUE (domain)
                          && host_integerp (TYPE_MIN_VALUE (domain), 0)
                          && host_integerp (TYPE_MAX_VALUE (domain), 0));

        /* If we have constant bounds for the range of the type, get them.  */
        if (const_bounds_p)
          {
            minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
            maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
          }

        /* If the constructor has fewer elements than the array, clear
           the whole array first.  Similarly if this is static
           constructor of a non-BLKmode object.  */
        if (cleared)
          need_to_clear = 0;
        else if (REG_P (target) && TREE_STATIC (exp))
          need_to_clear = 1;
        else
          {
            HOST_WIDE_INT count = 0, zero_count = 0;
            need_to_clear = ! const_bounds_p;

            /* This loop is a more accurate version of the loop in
               mostly_zeros_p (it handles RANGE_EXPR in an index).  It
               is also needed to check for missing elements.  */
            for (elt = CONSTRUCTOR_ELTS (exp);
                 elt != NULL_TREE && ! need_to_clear;
                 elt = TREE_CHAIN (elt))
              {
                tree index = TREE_PURPOSE (elt);
                HOST_WIDE_INT this_node_count;

                if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                  {
                    tree lo_index = TREE_OPERAND (index, 0);
                    tree hi_index = TREE_OPERAND (index, 1);

                    if (! host_integerp (lo_index, 1)
                        || ! host_integerp (hi_index, 1))
                      {
                        need_to_clear = 1;
                        break;
                      }

                    this_node_count = (tree_low_cst (hi_index, 1)
                                       - tree_low_cst (lo_index, 1) + 1);
                  }
                else
                  this_node_count = 1;

                count += this_node_count;
                if (mostly_zeros_p (TREE_VALUE (elt)))
                  zero_count += this_node_count;
              }

            /* Clear the entire array first if there are any missing
               elements, or if the incidence of zero elements is >=
               75%.  */
            if (! need_to_clear
                && (count < maxelt - minelt + 1
                    || 4 * zero_count >= 3 * count))
              need_to_clear = 1;
          }

        if (need_to_clear && size > 0)
          {
            if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            else
              clear_storage (target, GEN_INT (size));
            cleared = 1;
          }

        if (!cleared && REG_P (target))
          /* Inform later passes that the old value is dead.  */
          emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

        /* Store each element of the constructor into the
           corresponding element of TARGET, determined by counting the
           elements.  */
        for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
             elt;
             elt = TREE_CHAIN (elt), i++)
          {
            enum machine_mode mode;
            HOST_WIDE_INT bitsize;
            HOST_WIDE_INT bitpos;
            int unsignedp;
            tree value = TREE_VALUE (elt);
            tree index = TREE_PURPOSE (elt);
            rtx xtarget = target;

            if (cleared && initializer_zerop (value))
              continue;

            unsignedp = TYPE_UNSIGNED (elttype);
            mode = TYPE_MODE (elttype);
            if (mode == BLKmode)
              bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
                         ? tree_low_cst (TYPE_SIZE (elttype), 1)
                         : -1);
            else
              bitsize = GET_MODE_BITSIZE (mode);

            if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
              {
                tree lo_index = TREE_OPERAND (index, 0);
                tree hi_index = TREE_OPERAND (index, 1);
                rtx index_r, pos_rtx;
                HOST_WIDE_INT lo, hi, count;
                tree position;

                /* If the range is constant and "small", unroll the loop.  */
                if (const_bounds_p
                    && host_integerp (lo_index, 0)
                    && host_integerp (hi_index, 0)
                    && (lo = tree_low_cst (lo_index, 0),
                        hi = tree_low_cst (hi_index, 0),
                        count = hi - lo + 1,
                        (!MEM_P (target)
                         /* ... */
                         || (host_integerp (TYPE_SIZE (elttype), 1)
                             && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
                                 /* ... */)))))
                  {
                    lo -= minelt;  hi -= minelt;
                    for (; lo <= hi; lo++)
                      {
                        bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

                        if (MEM_P (target)
                            && !MEM_KEEP_ALIAS_SET_P (target)
                            && TREE_CODE (type) == ARRAY_TYPE
                            && TYPE_NONALIASED_COMPONENT (type))
                          {
                            target = copy_rtx (target);
                            MEM_KEEP_ALIAS_SET_P (target) = 1;
                          }

                        store_constructor_field
                          (target, bitsize, bitpos, mode, value, type, cleared,
                           get_alias_set (elttype));
                      }
                  }
                else
                  {
                    rtx loop_start = gen_label_rtx ();
                    rtx loop_end = gen_label_rtx ();
                    tree exit_cond;

                    expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
                    unsignedp = TYPE_UNSIGNED (domain);

                    index = build_decl (VAR_DECL, NULL_TREE, domain);

                    index_r
                      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                   &unsignedp, 0));
                    SET_DECL_RTL (index, index_r);
                    store_expr (lo_index, index_r, 0);

                    /* Build the head of the loop.  */
                    do_pending_stack_adjust ();
                    emit_label (loop_start);

                    /* Assign value to element index.  */
                    position
                      = convert (ssizetype,
                                 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
                                               index, TYPE_MIN_VALUE (domain))));
                    position = size_binop (MULT_EXPR, position,
                                           convert (ssizetype,
                                                    TYPE_SIZE_UNIT (elttype)));

                    pos_rtx = expand_expr (position, 0, VOIDmode, 0);
                    xtarget = offset_address (target, pos_rtx,
                                              highest_pow2_factor (position));
                    xtarget = adjust_address (xtarget, mode, 0);
                    if (TREE_CODE (value) == CONSTRUCTOR)
                      store_constructor (value, xtarget, cleared,
                                         bitsize / BITS_PER_UNIT);
                    else
                      store_expr (value, xtarget, 0);

                    /* Generate a conditional jump to exit the loop.  */
                    exit_cond = build2 (LT_EXPR, integer_type_node,
                                        index, hi_index);
                    jumpif (exit_cond, loop_end);

                    /* Update the loop counter, and jump to the head of
                       the loop.  */
                    expand_assignment (index,
                                       build2 (PLUS_EXPR, TREE_TYPE (index),
                                               index, integer_one_node));

                    emit_jump (loop_start);

                    /* Build the end of the loop.  */
                    emit_label (loop_end);
                  }
              }
            else if ((index != 0 && ! host_integerp (index, 0))
                     || ! host_integerp (TYPE_SIZE (elttype), 1))
              {
                tree position;

                if (index == 0)
                  index = ssize_int (1);

                if (minelt)
                  index = fold_convert (ssizetype,
                                        fold (build2 (MINUS_EXPR,
                                                      TREE_TYPE (index), index,
                                                      TYPE_MIN_VALUE (domain))));

                position = size_binop (MULT_EXPR, index,
                                       convert (ssizetype,
                                                TYPE_SIZE_UNIT (elttype)));
                xtarget = offset_address (target,
                                          expand_expr (position, 0, VOIDmode, 0),
                                          highest_pow2_factor (position));
                xtarget = adjust_address (xtarget, mode, 0);
                store_expr (value, xtarget, 0);
              }
            else
              {
                if (index != 0)
                  bitpos = ((tree_low_cst (index, 0) - minelt)
                            * tree_low_cst (TYPE_SIZE (elttype), 1));
                else
                  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

                if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
                    && TREE_CODE (type) == ARRAY_TYPE
                    && TYPE_NONALIASED_COMPONENT (type))
                  {
                    target = copy_rtx (target);
                    MEM_KEEP_ALIAS_SET_P (target) = 1;
                  }
                store_constructor_field (target, bitsize, bitpos, mode, value,
                                         type, cleared, get_alias_set (elttype));
              }
          }
        break;
      }
    case VECTOR_TYPE:
      {
        tree elt;
        int i;
        int need_to_clear;
        int icode = 0;
        tree elttype = TREE_TYPE (type);
        int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
        enum machine_mode eltmode = TYPE_MODE (elttype);
        HOST_WIDE_INT bitsize;
        HOST_WIDE_INT bitpos;
        rtx *vector = NULL;
        unsigned n_elts;

        gcc_assert (eltmode != BLKmode);

        n_elts = TYPE_VECTOR_SUBPARTS (type);
        if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
          {
            enum machine_mode mode = GET_MODE (target);

            icode = (int) vec_init_optab->handlers[mode].insn_code;
            if (icode != CODE_FOR_nothing)
              {
                unsigned int i;

                vector = alloca (n_elts);
                for (i = 0; i < n_elts; i++)
                  vector[i] = CONST0_RTX (GET_MODE_INNER (mode));
              }
          }

        /* If the constructor has fewer elements than the vector,
           clear the whole array first.  Similarly if this is static
           constructor of a non-BLKmode object.  */
        if (cleared)
          need_to_clear = 0;
        else if (REG_P (target) && TREE_STATIC (exp))
          need_to_clear = 1;
        else
          {
            unsigned HOST_WIDE_INT count = 0, zero_count = 0;

            for (elt = CONSTRUCTOR_ELTS (exp);
                 elt;
                 elt = TREE_CHAIN (elt))
              {
                int n_elts_here = tree_low_cst
                  (int_const_binop (TRUNC_DIV_EXPR,
                                    TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
                                    TYPE_SIZE (elttype), 0), 1);

                count += n_elts_here;
                if (mostly_zeros_p (TREE_VALUE (elt)))
                  zero_count += n_elts_here;
              }

            /* Clear the entire vector first if there are any missing elements,
               or if the incidence of zero elements is >= 75%.  */
            need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
          }

        if (need_to_clear && size > 0 && !vector)
          {
            if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            else
              clear_storage (target, GEN_INT (size));
            cleared = 1;
          }

        if (!cleared && REG_P (target))
          /* Inform later passes that the old value is dead.  */
          emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */
        for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
             elt;
             elt = TREE_CHAIN (elt), i += bitsize / elt_size)
          {
            tree value = TREE_VALUE (elt);
            tree index = TREE_PURPOSE (elt);
            HOST_WIDE_INT eltpos;

            bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
            if (cleared && initializer_zerop (value))
              continue;

            if (index != 0)
              eltpos = tree_low_cst (index, 1);
            else
              eltpos = i;

            if (vector)
              {
                /* Vector CONSTRUCTORs should only be built from smaller
                   vectors in the case of BLKmode vectors.  */
                gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
                vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
              }
            else
              {
                enum machine_mode value_mode =
                  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
                  ? TYPE_MODE (TREE_TYPE (value))
                  : eltmode;
                bitpos = eltpos * elt_size;
                store_constructor_field (target, bitsize, bitpos,
                                         value_mode, value, type,
                                         cleared, get_alias_set (elttype));
              }
          }

        if (vector)
          emit_insn (GEN_FCN (icode)
                     (target,
                      gen_rtx_PARALLEL (GET_MODE (target),
                                        gen_rtvec_v (n_elts, vector))));
        break;
      }
    case SET_TYPE:
      /* Set constructor assignments.  */
      {
        tree elt = CONSTRUCTOR_ELTS (exp);
        unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
        tree domain = TYPE_DOMAIN (type);
        tree domain_min, domain_max, bitlength;

        /* The default implementation strategy is to extract the
           constant parts of the constructor, use that to initialize
           the target, and then "or" in whatever non-constant ranges
           we need in addition.

           If a large set is all zero or all ones, it is probably
           better to set it using memset.  Also, if a large set has
           just a single range, it may also be better to first clear
           all the first clear the set (using memset), and set the
           bits we want.  */

        /* Check for all zeros.  */
        if (elt == NULL_TREE && size > 0)
          {
            if (!cleared)
              clear_storage (target, GEN_INT (size));
            break;
          }

        domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
        domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
        bitlength = size_binop (PLUS_EXPR,
                                size_diffop (domain_max, domain_min),
                                ssize_int (1));

        nbits = tree_low_cst (bitlength, 1);

        /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
           that are "complicated" (more than one range), initialize
           (the constant parts) by copying from a constant.  */
        if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
            || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
          {
            unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
            enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
            char *bit_buffer = alloca (nbits);
            HOST_WIDE_INT word = 0;
            unsigned int bit_pos = 0;
            unsigned int ibit = 0;
            unsigned int offset = 0;  /* In bytes from beginning of set.  */

            elt = get_set_constructor_bits (exp, bit_buffer, nbits);
            for (;;)
              {
                if (bit_buffer[ibit])
                  {
                    if (BYTES_BIG_ENDIAN)
                      word |= (1 << (set_word_size - 1 - bit_pos));
                    else
                      word |= 1 << bit_pos;
                  }

                bit_pos++;  ibit++;
                if (bit_pos >= set_word_size || ibit == nbits)
                  {
                    if (word != 0 || ! cleared)
                      {
                        rtx datum = gen_int_mode (word, mode);
                        rtx to_rtx;

                        /* The assumption here is that it is safe to
                           use XEXP if the set is multi-word, but not
                           if it's single-word.  */
                        if (MEM_P (target))
                          to_rtx = adjust_address (target, mode, offset);
                        else
                          {
                            gcc_assert (!offset);
                            to_rtx = target;
                          }
                        emit_move_insn (to_rtx, datum);
                      }

                    if (ibit == nbits)
                      break;
                    word = 0;
                    bit_pos = 0;
                    offset += set_word_size / BITS_PER_UNIT;
                  }
              }
          }
        else if (!cleared)
          /* Don't bother clearing storage if the set is all ones.  */
          if (TREE_CHAIN (elt) != NULL_TREE
              || (TREE_PURPOSE (elt) == NULL_TREE
                  ? nbits != 1
                  : ( ! host_integerp (TREE_VALUE (elt), 0)
                      || ! host_integerp (TREE_PURPOSE (elt), 0)
                      || (tree_low_cst (TREE_VALUE (elt), 0)
                          - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
                          != (HOST_WIDE_INT) nbits))))
            clear_storage (target, expr_size (exp));

        for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
          {
            /* Start of range of element or NULL.  */
            tree startbit = TREE_PURPOSE (elt);
            /* End of range of element, or element value.  */
            tree endbit = TREE_VALUE (elt);
            HOST_WIDE_INT startb, endb;
            rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

            bitlength_rtx = expand_expr (bitlength,
                                         NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

            /* Handle non-range tuple element like [ expr ].  */
            if (startbit == NULL_TREE)
              {
                startbit = save_expr (endbit);
                endbit = startbit;
              }

            startbit = convert (sizetype, startbit);
            endbit = convert (sizetype, endbit);
            if (! integer_zerop (domain_min))
              {
                startbit = size_binop (MINUS_EXPR, startbit, domain_min);
                endbit = size_binop (MINUS_EXPR, endbit, domain_min);
              }
            startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
                                        EXPAND_CONST_ADDRESS);
            endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
                                      EXPAND_CONST_ADDRESS);

            if (REG_P (target))
              {
                targetx
                  = assign_temp
                    ((build_qualified_type (lang_hooks.types.type_for_mode
                                            (GET_MODE (target), 0),
                                            TYPE_QUAL_CONST)),
                     0, 1, 1);
                emit_move_insn (targetx, target);
              }
            else
              {
                gcc_assert (MEM_P (target));
                targetx = target;
              }

            /* Optimization:  If startbit and endbit are constants divisible
               by BITS_PER_UNIT, call memset instead.  */
            if (TREE_CODE (startbit) == INTEGER_CST
                && TREE_CODE (endbit) == INTEGER_CST
                && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
                && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
              {
                emit_library_call (memset_libfunc, LCT_NORMAL,
                                   VOIDmode, 3,
                                   plus_constant (XEXP (targetx, 0),
                                                  startb / BITS_PER_UNIT),
                                   Pmode,
                                   constm1_rtx, TYPE_MODE (integer_type_node),
                                   GEN_INT ((endb - startb) / BITS_PER_UNIT),
                                   TYPE_MODE (sizetype));
              }
            else
              emit_library_call (setbits_libfunc, LCT_NORMAL,
                                 VOIDmode, 4, XEXP (targetx, 0),
                                 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
                                 startbit_rtx, TYPE_MODE (sizetype),
                                 endbit_rtx, TYPE_MODE (sizetype));

            if (REG_P (target))
              emit_move_insn (target, targetx);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }
}
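
/* Illustrative sketch (not part of the original expr.c): the SET_TYPE case
   above falls back to memset when a range's start and end bits are both
   byte-aligned, and otherwise sets the bits through a library routine.  The
   helper below shows that same special case for an ordinary bit array in
   host memory; the names are assumptions made only for this example, and the
   block is kept out of the build.  */
#if 0
#include <string.h>

static void
example_set_bit_range (unsigned char *bits, size_t startbit, size_t endbit)
{
  size_t i;

  if (startbit % 8 == 0 && endbit % 8 == 0)
    {
      /* Whole bytes: one memset covers the range, as in the code above.  */
      memset (bits + startbit / 8, 0xff, (endbit - startbit) / 8);
      return;
    }

  /* Otherwise set the bits one at a time (setbits_libfunc plays this role
     in the RTL expansion).  */
  for (i = startbit; i < endbit; i++)
    bits[i / 8] |= (unsigned char) (1 << (i % 8));
}
#endif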
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
             enum machine_mode mode, tree exp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (mode != BLKmode
          && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
                || bitpos % GET_MODE_ALIGNMENT (mode))
               && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
              || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
         RHS isn't the same size as the bitfield, we must use bitfield
         operations.  */
      || (bitsize >= 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
          && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
                                       - bitsize),
                             NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
         must be in memory and BITPOS must be aligned on a byte
         boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
        {
          gcc_assert (MEM_P (target) && MEM_P (temp)
                      && !(bitpos % BITS_PER_UNIT));

          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           BLOCK_OP_NORMAL);

          return const0_rtx;
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
        to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
        set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0);
    }
}
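
/* Illustrative sketch (not part of the original expr.c): when store_field
   cannot use an ordinary memory reference it stores through a bit-field
   style read-modify-write.  The helper below shows the mask-and-shift
   pattern on a host word; the name is an assumption made only for this
   example, bit numbering is little-endian, 0 < bitsize < 32 is assumed, and
   the block is kept out of the build.  */
#if 0
static unsigned int
example_store_bit_field (unsigned int word, int bitpos, int bitsize,
                         unsigned int value)
{
  unsigned int mask = ((1u << bitsize) - 1) << bitpos;

  /* Clear the destination bits, then insert the (truncated) value.  */
  return (word & ~mask) | ((value << bitpos) & mask);
}
#endif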
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
                     HOST_WIDE_INT *pbitpos, tree *poffset,
                     enum machine_mode *pmode, int *punsignedp,
                     int *pvolatilep)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
        mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
        *pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
        bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (exp, 1);
          tree this_offset = component_ref_field_offset (exp);

          /* If this field hasn't been filled in yet, don't go
             past it.  This should only happen when folding expressions
             made during type construction.  */
          if (this_offset == 0)
            break;

          offset = size_binop (PLUS_EXPR, offset, this_offset);
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                   DECL_FIELD_BIT_OFFSET (field));

          /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
        }

      else if (TREE_CODE (exp) == ARRAY_REF
               || TREE_CODE (exp) == ARRAY_RANGE_REF)
        {
          tree index = TREE_OPERAND (exp, 1);
          tree low_bound = array_ref_low_bound (exp);
          tree unit_size = array_ref_element_size (exp);

          /* We assume all arrays have sizes that are a multiple of a byte.
             First subtract the lower bound, if any, in the type of the
             index, then convert to sizetype and multiply by the size of the
             array element.  */
          if (! integer_zerop (low_bound))
            index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
                                  index, low_bound));

          offset = size_binop (PLUS_EXPR, offset,
                               size_binop (MULT_EXPR,
                                           convert (sizetype, index),
                                           unit_size));
        }

      /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
         conversions that don't change the mode, and all view conversions
         except those that need to "step up" the alignment.  */
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
               && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
                     && ! ((TYPE_ALIGN (TREE_TYPE (exp))
                            > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
                           && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                               < BIGGEST_ALIGNMENT)
                           && (TYPE_ALIGN_OK (TREE_TYPE (exp))
                               || TYPE_ALIGN_OK (TREE_TYPE
                                                 (TREE_OPERAND (exp, 0))))))
               && ! ((TREE_CODE (exp) == NOP_EXPR
                      || TREE_CODE (exp) == CONVERT_EXPR)
                     && (TYPE_MODE (TREE_TYPE (exp))
                         == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
                                 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
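
/* Illustrative sketch (not part of the original expr.c): for an access such
   as a[i].f, the decomposition above yields a base object, a constant bit
   position for the field, and a variable byte offset for the index.  The
   struct and helper below only illustrate that split in host C; they are
   assumptions made for the example, use CHAR_BIT-agnostic "* 8" for brevity,
   and are kept out of the build.  */
#if 0
#include <stddef.h>

struct example_elt { int f; int g; };

static void
example_decompose (size_t i, size_t *byte_offset, size_t *bitpos,
                   size_t *bitsize)
{
  /* Variable part: i elements from the start of the array.  */
  *byte_offset = i * sizeof (struct example_elt);
  /* Constant part: where field f sits inside one element, in bits.  */
  *bitpos = offsetof (struct example_elt, f) * 8;
  *bitsize = sizeof (int) * 8;
}
#endif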
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
        aligned_size = fold_convert (sizetype, aligned_size);
      return size_binop (MULT_EXPR, aligned_size,
                         size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
        aligned_offset = fold_convert (sizetype, aligned_offset);
      return size_binop (MULT_EXPR, aligned_offset,
                         size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    /* ??? Sure they are handled, but get_inner_reference may return
       a different PBITSIZE, depending upon whether the expression is
       wrapped up in a NOP_EXPR or not, e.g. for bitfields.  */
    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value = simplify_gen_subreg (GET_MODE (value),
                                   force_reg (GET_MODE (SUBREG_REG (value)),
                                              force_operand (SUBREG_REG (value),
                                                             NULL_RTX)),
                                   GET_MODE (SUBREG_REG (value)),
                                   SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
        subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
        target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
                    code == ZERO_EXTEND);
      return target;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
        subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
        {
          code = PLUS;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && REG_P (XEXP (XEXP (value, 0), 0))
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_simple_binop (GET_MODE (value), code,
                                          XEXP (XEXP (value, 0), 0), op2,
                                          subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_simple_binop (GET_MODE (value), code, temp,
                                      force_operand (XEXP (XEXP (value,
                                                                 0), 1), 0),
                                      target, 0, OPTAB_LIB_WIDEN);
        }

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
        {
        case MULT:
          return expand_mult (GET_MODE (value), op1, op2, target, 1);
        case DIV:
          if (!INTEGRAL_MODE_P (GET_MODE (value)))
            return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                        target, 1, OPTAB_LIB_WIDEN);
          else
            return expand_divmod (0,
                                  FLOAT_MODE_P (GET_MODE (value))
                                  ? RDIV_EXPR : TRUNC_DIV_EXPR,
                                  GET_MODE (value), op1, op2, target, 0);
        case MOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 0);
        case UDIV:
          return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case UMOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case ASHIFTRT:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 0, OPTAB_LIB_WIDEN);
        default:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 1, OPTAB_LIB_WIDEN);
        }
    }

  if (UNARY_P (value))
    {
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
                             force_reg (GET_MODE (SUBREG_REG (value)),
                                        force_operand (SUBREG_REG (value),
                                                       NULL_RTX)),
                             GET_MODE (SUBREG_REG (value)),
                             SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
                 != INTEGER_CST)
          && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
        {
          while (1)
            {
              if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
                return 0;
              exp = TREE_CHAIN (exp);
              if (!exp)
                return 1;
              if (TREE_CODE (exp) != TREE_LIST)
                return safe_from_p (x, exp, 0);
            }
        }
      else if (TREE_CODE (exp) == ERROR_MARK)
        return 1;	/* An already-visited SAVE_EXPR? */
      else
        return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
         DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
              || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
              || !DECL_INITIAL (DECL_EXPR_DECL (exp))
              || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
        return 0;

      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          /* If the operand is static or we are static, we can't conflict.
             Likewise if we don't conflict with the operand at all.  */
          if (staticp (TREE_OPERAND (exp, 0))
              || TREE_STATIC (exp)
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
            return 1;

          /* Otherwise, the only way this can conflict is if we are taking
             the address of a DECL a that address if part of X, which is
             very rare.  */
          exp = TREE_OPERAND (exp, 0);
          if (DECL_P (exp))
            {
              if (!DECL_RTL_SET_P (exp)
                  || !MEM_P (DECL_RTL (exp)))
                return 0;
              else
                exp_rtl = XEXP (DECL_RTL (exp), 0);
            }
          break;

        case MISALIGNED_INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          if (MEM_P (x)
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
                                        get_alias_set (exp)))
            return 0;
          break;

        case CALL_EXPR:
          /* Assume that the call will clobber all hard registers and
             all of memory.  */
          if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
              || MEM_P (x))
            return 0;
          break;

        case WITH_CLEANUP_EXPR:
        case CLEANUP_POINT_EXPR:
          /* Lowered by gimplify.c.  */
          gcc_unreachable ();

        case SAVE_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

        default:
          break;
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
          return 0;

      /* If this is a language-specific tree code, it may require
         special handling.  */
      if ((unsigned int) TREE_CODE (exp)
          >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
          && !lang_hooks.safe_from_p (x, exp))
        return 0;
      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (REG_P (exp_rtl)
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (MEM_P (x) && MEM_P (exp_rtl)
                    && true_dependence (exp_rtl, VOIDmode, x,
                                        rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
        return BIGGEST_ALIGNMENT;
      else
        {
          /* Note: tree_low_cst is intentionally not used here,
             we don't care about the upper bits.  */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;
          return c0 ? c0 : BIGGEST_ALIGNMENT;
        }
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
          && host_integerp (TREE_OPERAND (exp, 1), 1))
        {
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
          return MAX (1, c0 / c1);
        }
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
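
/* Illustrative sketch (not part of the original expr.c): for a constant the
   largest power-of-two factor is just its lowest set bit, and for a sum the
   result can only be the smaller of the two operands' factors.  The helper
   names below are assumptions made only for this example, and the block is
   kept out of the build.  */
#if 0
static unsigned HOST_WIDE_INT
example_pow2_factor_of_constant (unsigned HOST_WIDE_INT c)
{
  /* c & -c isolates the lowest set bit, e.g. 12 -> 4, 40 -> 8.  */
  return c ? (c & -c) : 0;
}

static unsigned HOST_WIDE_INT
example_pow2_factor_of_sum (unsigned HOST_WIDE_INT a, unsigned HOST_WIDE_INT b)
{
  /* a + b is divisible by a power of two only if both addends are.  */
  unsigned HOST_WIDE_INT fa = example_pow2_factor_of_constant (a);
  unsigned HOST_WIDE_INT fb = example_pow2_factor_of_constant (b);
  return fa < fb ? fa : fb;
}
#endif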
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
  else
    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
  return MAX (factor, target_align);
}
/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
        /* Should be ignored.  */;
      else if (lang_hooks.expand_decl (var))
        /* OK.  */;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
        expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
        rest_of_decl_compilation (var, 0, 0);
      else
        /* No expansion needed.  */
        gcc_assert (TREE_CODE (var) == TYPE_DECL
                    || TREE_CODE (var) == CONST_DECL
                    || TREE_CODE (var) == FUNCTION_DECL
                    || TREE_CODE (var) == LABEL_DECL);
    }
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
                 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
         temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
        exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      || CONSTANT_CLASS_P (exp))
    return XEXP (output_constant_def (exp, 0), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);

    case CONST_DECL:
      /* Recurse and make the output_constant_def clause above handle this.  */
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
				      tmode, modifier);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Assume language
	 specific tree nodes can be expanded in some interesting way.  */
      if (DECL_P (exp)
	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */
	  gcc_assert (GET_CODE (result) == MEM);
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp) && ! TREE_USED (exp))
	    {
	      assemble_external (exp);
	      TREE_USED (exp) = 1;
	    }

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS)
	    result = force_operand (result, target);
	  return result;
	}

      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);

      result = convert_memory_address (tmode, result);
      tmp = convert_memory_address (tmode, tmp);

      if (modifier == EXPAND_SUM)
	result = gen_rtx_PLUS (tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
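
/* For illustration only: given an expression like &p->a[i].b, the
   get_inner_reference call above splits the reference into the innermost
   object (*p), a variable byte OFFSET tree (the a[i] part) and a constant
   bit position BITPOS (the .b part, plus any constant array offset); the
   address is then rebuilt roughly as

       addr(*p) + OFFSET + BITPOS / BITS_PER_UNIT

   by the expand_simple_binop and plus_constant calls at the end of
   expand_expr_addr_expr_1.  */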
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier);

  /* Despite expand_expr's claim that it ignores TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address (tmode, result);

  return result;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
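
/* For illustration only: since TARGET and TMODE are only suggestions, the
   canonical calling pattern (used throughout this file) is

       temp = expand_expr (exp, target, mode, EXPAND_NORMAL);
       if (temp != target)
	 emit_move_insn (target, temp);

   i.e. the caller must always be prepared to copy the returned rtx into
   the place it actually wanted.  */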
static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;

      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						REG_NOTES (insn));
	}
    }

  return ret;
}
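
/* For illustration only: with -fnon-call-exceptions, the loop above hangs a
   (expr_list:REG_EH_REGION (const_int RN)) note on any instruction that
   may_trap_p considers able to fault (for example a memory load through a
   possibly null pointer), so that later RTL passes know the insn can throw
   and which exception region RN it belongs to.  */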
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  if (lang_hooks.reduce_bit_field_operations
      && TREE_CODE (type) == INTEGER_TYPE
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
    {
      /* An operation in what may be a bit-field type needs the
	 result to be reduced to the precision of the bit-field type,
	 which is narrower than that of the type's mode.  */
      reduce_bit_field = true;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
    }

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == COND_EXPR
		 || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == COMPONENT_REF || code == INDIRECT_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
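
  /* For illustration only: the IGNORE shortcuts above mean that a statement
     such as

	 (void) (x + f ());

     never emits an addition; both operands are expanded with const0_rtx as
     the target purely for their side effects, so only the call to f remains
     in the insn stream.  */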
6387 /* If will do cse, generate all results into pseudo registers
6388 since 1) that allows cse to find more things
6389 and 2) otherwise cse could produce an insn the machine
6390 cannot support. An exception is a CONSTRUCTOR into a multi-word
6391 MEM: that's much more likely to be most efficient into the MEM.
6392 Another is a CALL_EXPR which must return in memory. */
6394 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6395 && (!REG_P (target
) || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
6396 && ! (code
== CONSTRUCTOR
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6397 && ! (code
== CALL_EXPR
&& aggregate_value_p (exp
, exp
)))
6404 tree function
= decl_function_context (exp
);
6406 temp
= label_rtx (exp
);
6407 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
6409 if (function
!= current_function_decl
6411 LABEL_REF_NONLOCAL_P (temp
) = 1;
6413 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
6418 return expand_expr_real_1 (SSA_NAME_VAR (exp
), target
, tmode
, modifier
,
6423 /* If a static var's type was incomplete when the decl was written,
6424 but the type is complete now, lay out the decl now. */
6425 if (DECL_SIZE (exp
) == 0
6426 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
6427 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6428 layout_decl (exp
, 0);
6430 /* ... fall through ... */
6434 gcc_assert (DECL_RTL (exp
));
      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
6439 if (! TREE_USED (exp
))
6441 assemble_external (exp
);
6442 TREE_USED (exp
) = 1;
6445 /* Show we haven't gotten RTL for this yet. */
6448 /* Variables inherited from containing functions should have
6449 been lowered by this point. */
6450 context
= decl_function_context (exp
);
6451 gcc_assert (!context
6452 || context
== current_function_decl
6453 || TREE_STATIC (exp
)
6454 /* ??? C++ creates functions that are not TREE_STATIC. */
6455 || TREE_CODE (exp
) == FUNCTION_DECL
);
6457 /* This is the case of an array whose size is to be determined
6458 from its initializer, while the initializer is still being parsed.
6461 if (MEM_P (DECL_RTL (exp
))
6462 && REG_P (XEXP (DECL_RTL (exp
), 0)))
6463 temp
= validize_mem (DECL_RTL (exp
));
6465 /* If DECL_RTL is memory, we are in the normal case and either
6466 the address is not valid or it is not a register and -fforce-addr
6467 is specified, get the address into a register. */
6469 else if (MEM_P (DECL_RTL (exp
))
6470 && modifier
!= EXPAND_CONST_ADDRESS
6471 && modifier
!= EXPAND_SUM
6472 && modifier
!= EXPAND_INITIALIZER
6473 && (! memory_address_p (DECL_MODE (exp
),
6474 XEXP (DECL_RTL (exp
), 0))
6476 && !REG_P (XEXP (DECL_RTL (exp
), 0)))))
6479 *alt_rtl
= DECL_RTL (exp
);
6480 temp
= replace_equiv_address (DECL_RTL (exp
),
6481 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6484 /* If we got something, return it. But first, set the alignment
6485 if the address is a register. */
6488 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
6489 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6494 /* If the mode of DECL_RTL does not match that of the decl, it
6495 must be a promoted value. We return a SUBREG of the wanted mode,
6496 but mark it so that we know that it was already extended. */
6498 if (REG_P (DECL_RTL (exp
))
6499 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6501 enum machine_mode pmode
;
6503 /* Get the signedness used for this variable. Ensure we get the
6504 same mode we got when the variable was declared. */
6505 pmode
= promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6506 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0));
6507 gcc_assert (GET_MODE (DECL_RTL (exp
)) == pmode
);
6509 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6510 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6511 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6515 return DECL_RTL (exp
);
6518 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6519 TREE_INT_CST_HIGH (exp
), mode
);
6521 /* ??? If overflow is set, fold will have done an incomplete job,
6522 which can result in (plus xx (const_int 0)), which can get
6523 simplified by validate_replace_rtx during virtual register
6524 instantiation, which can result in unrecognizable insns.
6525 Avoid this by forcing all overflows into registers. */
6526 if (TREE_CONSTANT_OVERFLOW (exp
)
6527 && modifier
!= EXPAND_INITIALIZER
)
6528 temp
= force_reg (mode
, temp
);
6533 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp
))) == MODE_VECTOR_INT
6534 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp
))) == MODE_VECTOR_FLOAT
)
6535 return const_vector_from_tree (exp
);
6537 return expand_expr (build1 (CONSTRUCTOR
, TREE_TYPE (exp
),
6538 TREE_VECTOR_CST_ELTS (exp
)),
6539 ignore
? const0_rtx
: target
, tmode
, modifier
);
6542 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
6545 /* If optimized, generate immediate CONST_DOUBLE
6546 which will be turned into memory by reload if necessary.
6548 We used to force a register so that loop.c could see it. But
6549 this does not allow gen_* patterns to perform optimizations with
6550 the constants. It also produces two insns in cases like "x = 1.0;".
6551 On most machines, floating-point constants are not permitted in
6552 many insns, so we'd end up copying it to a register in any case.
6554 Now, we do the copying in expand_binop, if appropriate. */
6555 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6556 TYPE_MODE (TREE_TYPE (exp
)));
6559 /* Handle evaluating a complex constant in a CONCAT target. */
6560 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
6562 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
6565 rtarg
= XEXP (original_target
, 0);
6566 itarg
= XEXP (original_target
, 1);
6568 /* Move the real and imaginary parts separately. */
6569 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, 0);
6570 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, 0);
6573 emit_move_insn (rtarg
, op0
);
6575 emit_move_insn (itarg
, op1
);
6577 return original_target
;
6580 /* ... fall through ... */
6583 temp
= output_constant_def (exp
, 1);
6585 /* temp contains a constant address.
6586 On RISC machines where a constant address isn't valid,
6587 make some insns to get that address into a register. */
6588 if (modifier
!= EXPAND_CONST_ADDRESS
6589 && modifier
!= EXPAND_INITIALIZER
6590 && modifier
!= EXPAND_SUM
6591 && (! memory_address_p (mode
, XEXP (temp
, 0))
6592 || flag_force_addr
))
6593 return replace_equiv_address (temp
,
6594 copy_rtx (XEXP (temp
, 0)));
6599 tree val
= TREE_OPERAND (exp
, 0);
6600 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
);
6602 if (!SAVE_EXPR_RESOLVED_P (exp
))
6604 /* We can indeed still hit this case, typically via builtin
6605 expanders calling save_expr immediately before expanding
6606 something. Assume this means that we only have to deal
6607 with non-BLKmode values. */
6608 gcc_assert (GET_MODE (ret
) != BLKmode
);
6610 val
= build_decl (VAR_DECL
, NULL
, TREE_TYPE (exp
));
6611 DECL_ARTIFICIAL (val
) = 1;
6612 DECL_IGNORED_P (val
) = 1;
6613 TREE_OPERAND (exp
, 0) = val
;
6614 SAVE_EXPR_RESOLVED_P (exp
) = 1;
6616 if (!CONSTANT_P (ret
))
6617 ret
= copy_to_reg (ret
);
6618 SET_DECL_RTL (val
, ret
);
6625 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6626 expand_goto (TREE_OPERAND (exp
, 0));
6628 expand_computed_goto (TREE_OPERAND (exp
, 0));
6632 /* If we don't need the result, just ensure we evaluate any
6638 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6639 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6644 /* All elts simple constants => refer to a constant in memory. But
6645 if this is a non-BLKmode mode, let it store a field at a time
6646 since that should make a CONST_INT or CONST_DOUBLE when we
6647 fold. Likewise, if we have a target we can use, it is best to
6648 store directly into the target unless the type is large enough
6649 that memcpy will be used. If we are making an initializer and
6650 all operands are constant, put it in memory as well.
6652 FIXME: Avoid trying to fill vector constructors piece-meal.
6653 Output them with output_constant_def below unless we're sure
6654 they're zeros. This should go away when vector initializers
6655 are treated like VECTOR_CST instead of arrays.
6657 else if ((TREE_STATIC (exp
)
6658 && ((mode
== BLKmode
6659 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6660 || TREE_ADDRESSABLE (exp
)
6661 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6662 && (! MOVE_BY_PIECES_P
6663 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6665 && ! mostly_zeros_p (exp
))))
6666 || ((modifier
== EXPAND_INITIALIZER
6667 || modifier
== EXPAND_CONST_ADDRESS
)
6668 && TREE_CONSTANT (exp
)))
6670 rtx constructor
= output_constant_def (exp
, 1);
6672 if (modifier
!= EXPAND_CONST_ADDRESS
6673 && modifier
!= EXPAND_INITIALIZER
6674 && modifier
!= EXPAND_SUM
)
6675 constructor
= validize_mem (constructor
);
6681 /* Handle calls that pass values in multiple non-contiguous
6682 locations. The Irix 6 ABI has examples of this. */
6683 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6684 || GET_CODE (target
) == PARALLEL
6685 || modifier
== EXPAND_STACK_PARM
)
6687 = assign_temp (build_qualified_type (type
,
6689 | (TREE_READONLY (exp
)
6690 * TYPE_QUAL_CONST
))),
6691 0, TREE_ADDRESSABLE (exp
), 1);
6693 store_constructor (exp
, target
, 0, int_expr_size (exp
));
6697 case MISALIGNED_INDIRECT_REF
:
6698 case ALIGN_INDIRECT_REF
:
6701 tree exp1
= TREE_OPERAND (exp
, 0);
6704 if (code
== MISALIGNED_INDIRECT_REF
6705 && !targetm
.vectorize
.misaligned_mem_ok (mode
))
6708 if (modifier
!= EXPAND_WRITE
)
6712 t
= fold_read_from_constant_string (exp
);
6714 return expand_expr (t
, target
, tmode
, modifier
);
6717 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6718 op0
= memory_address (mode
, op0
);
6720 if (code
== ALIGN_INDIRECT_REF
)
6722 int align
= TYPE_ALIGN_UNIT (type
);
6723 op0
= gen_rtx_AND (Pmode
, op0
, GEN_INT (-align
));
6724 op0
= memory_address (mode
, op0
);
6727 temp
= gen_rtx_MEM (mode
, op0
);
6729 orig
= REF_ORIGINAL (exp
);
6732 set_mem_attributes (temp
, orig
, 0);
6740 tree array
= TREE_OPERAND (exp
, 0);
6741 tree low_bound
= array_ref_low_bound (exp
);
6742 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6745 gcc_assert (TREE_CODE (TREE_TYPE (array
)) == ARRAY_TYPE
);
6747 /* Optimize the special-case of a zero lower bound.
6749 We convert the low_bound to sizetype to avoid some problems
6750 with constant folding. (E.g. suppose the lower bound is 1,
6751 and its mode is QI. Without the conversion, (ARRAY
6752 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6753 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6755 if (! integer_zerop (low_bound
))
6756 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6758 /* Fold an expression like: "foo"[2].
6759 This is not done in fold so it won't happen inside &.
6760 Don't fold if this is for wide characters since it's too
6761 difficult to do correctly and this is a very rare case. */
6763 if (modifier
!= EXPAND_CONST_ADDRESS
6764 && modifier
!= EXPAND_INITIALIZER
6765 && modifier
!= EXPAND_MEMORY
)
6767 tree t
= fold_read_from_constant_string (exp
);
6770 return expand_expr (t
, target
, tmode
, modifier
);
6773 /* If this is a constant index into a constant array,
6774 just get the value from the array. Handle both the cases when
6775 we have an explicit constructor and when our operand is a variable
6776 that was declared const. */
6778 if (modifier
!= EXPAND_CONST_ADDRESS
6779 && modifier
!= EXPAND_INITIALIZER
6780 && modifier
!= EXPAND_MEMORY
6781 && TREE_CODE (array
) == CONSTRUCTOR
6782 && ! TREE_SIDE_EFFECTS (array
)
6783 && TREE_CODE (index
) == INTEGER_CST
6784 && 0 > compare_tree_int (index
,
6785 list_length (CONSTRUCTOR_ELTS
6786 (TREE_OPERAND (exp
, 0)))))
6790 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6791 i
= TREE_INT_CST_LOW (index
);
6792 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6796 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
6800 else if (optimize
>= 1
6801 && modifier
!= EXPAND_CONST_ADDRESS
6802 && modifier
!= EXPAND_INITIALIZER
6803 && modifier
!= EXPAND_MEMORY
6804 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6805 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6806 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
6807 && targetm
.binds_local_p (array
))
6809 if (TREE_CODE (index
) == INTEGER_CST
)
6811 tree init
= DECL_INITIAL (array
);
6813 if (TREE_CODE (init
) == CONSTRUCTOR
)
6817 for (elem
= CONSTRUCTOR_ELTS (init
);
6819 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6820 elem
= TREE_CHAIN (elem
))
6823 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6824 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6827 else if (TREE_CODE (init
) == STRING_CST
6828 && 0 > compare_tree_int (index
,
6829 TREE_STRING_LENGTH (init
)))
6831 tree type
= TREE_TYPE (TREE_TYPE (init
));
6832 enum machine_mode mode
= TYPE_MODE (type
);
6834 if (GET_MODE_CLASS (mode
) == MODE_INT
6835 && GET_MODE_SIZE (mode
) == 1)
6836 return gen_int_mode (TREE_STRING_POINTER (init
)
6837 [TREE_INT_CST_LOW (index
)], mode
);
6842 goto normal_inner_ref
;
6845 /* If the operand is a CONSTRUCTOR, we can just extract the
6846 appropriate field if it is present. */
6847 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
6851 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6852 elt
= TREE_CHAIN (elt
))
6853 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6854 /* We can normally use the value of the field in the
6855 CONSTRUCTOR. However, if this is a bitfield in
6856 an integral mode that we can fit in a HOST_WIDE_INT,
6857 we must mask only the number of bits in the bitfield,
6858 since this is done implicitly by the constructor. If
6859 the bitfield does not meet either of those conditions,
6860 we can't do this optimization. */
6861 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6862 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6864 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6865 <= HOST_BITS_PER_WIDE_INT
))))
6867 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6868 && modifier
== EXPAND_STACK_PARM
)
6870 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6871 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6873 HOST_WIDE_INT bitsize
6874 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6875 enum machine_mode imode
6876 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6878 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6880 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6881 op0
= expand_and (imode
, op0
, op1
, target
);
6886 = build_int_cst (NULL_TREE
,
6887 GET_MODE_BITSIZE (imode
) - bitsize
);
6889 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6891 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6899 goto normal_inner_ref
;
6902 case ARRAY_RANGE_REF
:
6905 enum machine_mode mode1
;
6906 HOST_WIDE_INT bitsize
, bitpos
;
6909 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6910 &mode1
, &unsignedp
, &volatilep
);
6913 /* If we got back the original object, something is wrong. Perhaps
6914 we are evaluating an expression too early. In any event, don't
6915 infinitely recurse. */
6916 gcc_assert (tem
!= exp
);
6918 /* If TEM's type is a union of variable size, pass TARGET to the inner
6919 computation, since it will need a temporary and TARGET is known
6920 to have to do. This occurs in unchecked conversion in Ada. */
6924 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
6925 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
6927 && modifier
!= EXPAND_STACK_PARM
6928 ? target
: NULL_RTX
),
6930 (modifier
== EXPAND_INITIALIZER
6931 || modifier
== EXPAND_CONST_ADDRESS
6932 || modifier
== EXPAND_STACK_PARM
)
6933 ? modifier
: EXPAND_NORMAL
);
6935 /* If this is a constant, put it into a register if it is a
6936 legitimate constant and OFFSET is 0 and memory if it isn't. */
6937 if (CONSTANT_P (op0
))
6939 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
6940 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
6942 op0
= force_reg (mode
, op0
);
6944 op0
= validize_mem (force_const_mem (mode
, op0
));
6947 /* Otherwise, if this object not in memory and we either have an
6948 offset or a BLKmode result, put it there. This case can't occur in
6949 C, but can in Ada if we have unchecked conversion of an expression
6950 from a scalar type to an array or record type or for an
6951 ARRAY_RANGE_REF whose type is BLKmode. */
6952 else if (!MEM_P (op0
)
6954 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
6956 tree nt
= build_qualified_type (TREE_TYPE (tem
),
6957 (TYPE_QUALS (TREE_TYPE (tem
))
6958 | TYPE_QUAL_CONST
));
6959 rtx memloc
= assign_temp (nt
, 1, 1, 1);
6961 emit_move_insn (memloc
, op0
);
6967 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
6970 gcc_assert (MEM_P (op0
));
6972 #ifdef POINTERS_EXTEND_UNSIGNED
6973 if (GET_MODE (offset_rtx
) != Pmode
)
6974 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
6976 if (GET_MODE (offset_rtx
) != ptr_mode
)
6977 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
6980 if (GET_MODE (op0
) == BLKmode
6981 /* A constant address in OP0 can have VOIDmode, we must
6982 not try to call force_reg in that case. */
6983 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
6985 && (bitpos
% bitsize
) == 0
6986 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
6987 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
6989 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
6993 op0
= offset_address (op0
, offset_rtx
,
6994 highest_pow2_factor (offset
));
6997 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6998 record its alignment as BIGGEST_ALIGNMENT. */
6999 if (MEM_P (op0
) && bitpos
== 0 && offset
!= 0
7000 && is_aligning_offset (offset
, tem
))
7001 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7003 /* Don't forget about volatility even if this is a bitfield. */
7004 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
7006 if (op0
== orig_op0
)
7007 op0
= copy_rtx (op0
);
7009 MEM_VOLATILE_P (op0
) = 1;
7012 /* The following code doesn't handle CONCAT.
7013 Assume only bitpos == 0 can be used for CONCAT, due to
7014 one element arrays having the same mode as its element. */
7015 if (GET_CODE (op0
) == CONCAT
)
7017 gcc_assert (bitpos
== 0
7018 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)));
7022 /* In cases where an aligned union has an unaligned object
7023 as a field, we might be extracting a BLKmode value from
7024 an integer-mode (e.g., SImode) object. Handle this case
7025 by doing the extract into an object as wide as the field
7026 (which we know to be the width of a basic mode), then
7027 storing into memory, and changing the mode to BLKmode. */
7028 if (mode1
== VOIDmode
7029 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
7030 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7031 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7032 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7033 && modifier
!= EXPAND_CONST_ADDRESS
7034 && modifier
!= EXPAND_INITIALIZER
)
7035 /* If the field isn't aligned enough to fetch as a memref,
7036 fetch it as a bit field. */
7037 || (mode1
!= BLKmode
7038 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7039 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
7041 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
7042 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
7043 && ((modifier
== EXPAND_CONST_ADDRESS
7044 || modifier
== EXPAND_INITIALIZER
)
7046 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7047 || (bitpos
% BITS_PER_UNIT
!= 0)))
7048 /* If the type and the field are a constant size and the
7049 size of the type isn't the same size as the bitfield,
7050 we must use bitfield operations. */
7052 && TYPE_SIZE (TREE_TYPE (exp
))
7053 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
7054 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7057 enum machine_mode ext_mode
= mode
;
7059 if (ext_mode
== BLKmode
7060 && ! (target
!= 0 && MEM_P (op0
)
7062 && bitpos
% BITS_PER_UNIT
== 0))
7063 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7065 if (ext_mode
== BLKmode
)
7068 target
= assign_temp (type
, 0, 1, 1);
7073 /* In this case, BITPOS must start at a byte boundary and
7074 TARGET, if specified, must be a MEM. */
7075 gcc_assert (MEM_P (op0
)
7076 && (!target
|| MEM_P (target
))
7077 && !(bitpos
% BITS_PER_UNIT
));
7079 emit_block_move (target
,
7080 adjust_address (op0
, VOIDmode
,
7081 bitpos
/ BITS_PER_UNIT
),
7082 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7084 (modifier
== EXPAND_STACK_PARM
7085 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7090 op0
= validize_mem (op0
);
7092 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
7093 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7095 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7096 (modifier
== EXPAND_STACK_PARM
7097 ? NULL_RTX
: target
),
7098 ext_mode
, ext_mode
);
7100 /* If the result is a record type and BITSIZE is narrower than
7101 the mode of OP0, an integral mode, and this is a big endian
7102 machine, we must put the field into the high-order bits. */
7103 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7104 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7105 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7106 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7107 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7111 /* If the result type is BLKmode, store the data into a temporary
7112 of the appropriate type, but with the mode corresponding to the
7113 mode for the data we have (op0's mode). It's tempting to make
7114 this a constant type, since we know it's only being stored once,
7115 but that can cause problems if we are taking the address of this
7116 COMPONENT_REF because the MEM of any reference via that address
7117 will have flags corresponding to the type, which will not
7118 necessarily be constant. */
7119 if (mode
== BLKmode
)
7122 = assign_stack_temp_for_type
7123 (ext_mode
, GET_MODE_BITSIZE (ext_mode
), 0, type
);
7125 emit_move_insn (new, op0
);
7126 op0
= copy_rtx (new);
7127 PUT_MODE (op0
, BLKmode
);
7128 set_mem_attributes (op0
, exp
, 1);
7134 /* If the result is BLKmode, use that to access the object
7136 if (mode
== BLKmode
)
7139 /* Get a reference to just this component. */
7140 if (modifier
== EXPAND_CONST_ADDRESS
7141 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7142 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7144 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7146 if (op0
== orig_op0
)
7147 op0
= copy_rtx (op0
);
7149 set_mem_attributes (op0
, exp
, 0);
7150 if (REG_P (XEXP (op0
, 0)))
7151 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7153 MEM_VOLATILE_P (op0
) |= volatilep
;
7154 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7155 || modifier
== EXPAND_CONST_ADDRESS
7156 || modifier
== EXPAND_INITIALIZER
)
7158 else if (target
== 0)
7159 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7161 convert_move (target
, op0
, unsignedp
);
7166 return expand_expr (OBJ_TYPE_REF_EXPR (exp
), target
, tmode
, modifier
);
7169 /* Check for a built-in function. */
7170 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7171 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7173 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7175 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7176 == BUILT_IN_FRONTEND
)
7177 return lang_hooks
.expand_expr (exp
, original_target
,
7181 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7184 return expand_call (exp
, target
, ignore
);
7186 case NON_LVALUE_EXPR
:
7189 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7192 if (TREE_CODE (type
) == UNION_TYPE
)
7194 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7196 /* If both input and output are BLKmode, this conversion isn't doing
7197 anything except possibly changing memory attribute. */
7198 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7200 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7203 result
= copy_rtx (result
);
7204 set_mem_attributes (result
, exp
, 0);
7210 if (TYPE_MODE (type
) != BLKmode
)
7211 target
= gen_reg_rtx (TYPE_MODE (type
));
7213 target
= assign_temp (type
, 0, 1, 1);
7217 /* Store data into beginning of memory target. */
7218 store_expr (TREE_OPERAND (exp
, 0),
7219 adjust_address (target
, TYPE_MODE (valtype
), 0),
7220 modifier
== EXPAND_STACK_PARM
);
7224 gcc_assert (REG_P (target
));
7226 /* Store this field into a union of the proper type. */
7227 store_field (target
,
7228 MIN ((int_size_in_bytes (TREE_TYPE
7229 (TREE_OPERAND (exp
, 0)))
7231 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7232 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7236 /* Return the entire union. */
7240 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7242 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7245 /* If the signedness of the conversion differs and OP0 is
7246 a promoted SUBREG, clear that indication since we now
7247 have to do the proper extension. */
7248 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7249 && GET_CODE (op0
) == SUBREG
)
7250 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7252 return REDUCE_BIT_FIELD (op0
);
7255 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7256 op0
= REDUCE_BIT_FIELD (op0
);
7257 if (GET_MODE (op0
) == mode
)
7260 /* If OP0 is a constant, just convert it into the proper mode. */
7261 if (CONSTANT_P (op0
))
7263 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7264 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7266 if (modifier
== EXPAND_INITIALIZER
)
7267 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7268 subreg_lowpart_offset (mode
,
7271 return convert_modes (mode
, inner_mode
, op0
,
7272 TYPE_UNSIGNED (inner_type
));
7275 if (modifier
== EXPAND_INITIALIZER
)
7276 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7280 convert_to_mode (mode
, op0
,
7281 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7283 convert_move (target
, op0
,
7284 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7287 case VIEW_CONVERT_EXPR
:
7288 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7290 /* If the input and output modes are both the same, we are done.
7291 Otherwise, if neither mode is BLKmode and both are integral and within
7292 a word, we can use gen_lowpart. If neither is true, make sure the
7293 operand is in memory and convert the MEM to the new mode. */
7294 if (TYPE_MODE (type
) == GET_MODE (op0
))
7296 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7297 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7298 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7299 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7300 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7301 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7302 else if (!MEM_P (op0
))
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
7308 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7310 gcc_assert (!TREE_ADDRESSABLE (exp
));
7312 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7314 = assign_stack_temp_for_type
7315 (TYPE_MODE (inner_type
),
7316 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7318 emit_move_insn (target
, op0
);
7322 /* At this point, OP0 is in the correct mode. If the output type is such
7323 that the operand is known to be aligned, indicate that it is.
7324 Otherwise, we need only be concerned about alignment for non-BLKmode
7328 op0
= copy_rtx (op0
);
7330 if (TYPE_ALIGN_OK (type
))
7331 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7332 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7333 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7335 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7336 HOST_WIDE_INT temp_size
7337 = MAX (int_size_in_bytes (inner_type
),
7338 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7339 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7340 temp_size
, 0, type
);
7341 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7343 gcc_assert (!TREE_ADDRESSABLE (exp
));
7345 if (GET_MODE (op0
) == BLKmode
)
7346 emit_block_move (new_with_op0_mode
, op0
,
7347 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7348 (modifier
== EXPAND_STACK_PARM
7349 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7351 emit_move_insn (new_with_op0_mode
, op0
);
7356 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7362 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7363 something else, make sure we add the register to the constant and
7364 then to the other thing. This case can occur during strength
7365 reduction and doing it this way will produce better code if the
7366 frame pointer or argument pointer is eliminated.
7368 fold-const.c will ensure that the constant is always in the inner
7369 PLUS_EXPR, so the only case we need to do anything about is if
7370 sp, ap, or fp is our second argument, in which case we must swap
7371 the innermost first argument and our second argument. */
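      /* For illustration only: the swap below turns a tree of the shape
	 (PLUS (PLUS x CST) fp) into (PLUS (PLUS fp CST) x), so the frame
	 (or stack/arg) pointer sits next to the constant and the inner sum
	 can later be folded by plus_constant into a single fp+offset
	 address.  */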
7373 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7374 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7375 && TREE_CODE (TREE_OPERAND (exp
, 1)) == VAR_DECL
7376 && (DECL_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7377 || DECL_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7378 || DECL_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7380 tree t
= TREE_OPERAND (exp
, 1);
7382 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7383 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7386 /* If the result is to be ptr_mode and we are adding an integer to
7387 something, we might be forming a constant. So try to use
7388 plus_constant. If it produces a sum and we can't accept it,
7389 use force_operand. This allows P = &ARR[const] to generate
7390 efficient code on machines where a SYMBOL_REF is not a valid
7393 If this is an EXPAND_SUM call, always return the sum. */
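      /* For illustration only: for P = &ARR[4] with 4-byte elements this
	 path lets plus_constant fold the (symbol_ref ARR) and the constant
	 16 into a single (const (plus (symbol_ref ARR) (const_int 16)))
	 instead of emitting an add instruction.  */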
7394 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7395 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7397 if (modifier
== EXPAND_STACK_PARM
)
7399 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7400 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7401 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7405 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7407 /* Use immed_double_const to ensure that the constant is
7408 truncated according to the mode of OP1, then sign extended
7409 to a HOST_WIDE_INT. Using the constant directly can result
7410 in non-canonical RTL in a 64x32 cross compile. */
7412 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7414 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7415 op1
= plus_constant (op1
, INTVAL (constant_part
));
7416 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7417 op1
= force_operand (op1
, target
);
7418 return REDUCE_BIT_FIELD (op1
);
7421 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7422 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7423 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7427 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7428 (modifier
== EXPAND_INITIALIZER
7429 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7430 if (! CONSTANT_P (op0
))
7432 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7433 VOIDmode
, modifier
);
7434 /* Return a PLUS if modifier says it's OK. */
7435 if (modifier
== EXPAND_SUM
7436 || modifier
== EXPAND_INITIALIZER
)
7437 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7440 /* Use immed_double_const to ensure that the constant is
7441 truncated according to the mode of OP1, then sign extended
7442 to a HOST_WIDE_INT. Using the constant directly can result
7443 in non-canonical RTL in a 64x32 cross compile. */
7445 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7447 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7448 op0
= plus_constant (op0
, INTVAL (constant_part
));
7449 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7450 op0
= force_operand (op0
, target
);
7451 return REDUCE_BIT_FIELD (op0
);
7455 /* No sense saving up arithmetic to be done
7456 if it's all in the wrong mode to form part of an address.
7457 And force_operand won't know whether to sign-extend or
7459 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7460 || mode
!= ptr_mode
)
7462 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7463 subtarget
, &op0
, &op1
, 0);
7464 if (op0
== const0_rtx
)
7466 if (op1
== const0_rtx
)
7471 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7472 subtarget
, &op0
, &op1
, modifier
);
7473 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
7476 /* For initializers, we are allowed to return a MINUS of two
7477 symbolic constants. Here we handle all cases when both operands
7479 /* Handle difference of two symbolic constants,
7480 for the sake of an initializer. */
7481 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7482 && really_constant_p (TREE_OPERAND (exp
, 0))
7483 && really_constant_p (TREE_OPERAND (exp
, 1)))
7485 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7486 NULL_RTX
, &op0
, &op1
, modifier
);
7488 /* If the last operand is a CONST_INT, use plus_constant of
7489 the negated constant. Else make the MINUS. */
7490 if (GET_CODE (op1
) == CONST_INT
)
7491 return REDUCE_BIT_FIELD (plus_constant (op0
, - INTVAL (op1
)));
7493 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode
, op0
, op1
));
7496 /* No sense saving up arithmetic to be done
7497 if it's all in the wrong mode to form part of an address.
7498 And force_operand won't know whether to sign-extend or
7500 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7501 || mode
!= ptr_mode
)
7504 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7505 subtarget
, &op0
, &op1
, modifier
);
7507 /* Convert A - const to A + (-const). */
7508 if (GET_CODE (op1
) == CONST_INT
)
7510 op1
= negate_rtx (mode
, op1
);
7511 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
7517 /* If first operand is constant, swap them.
7518 Thus the following special case checks need only
7519 check the second operand. */
7520 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7522 tree t1
= TREE_OPERAND (exp
, 0);
7523 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7524 TREE_OPERAND (exp
, 1) = t1
;
7527 /* Attempt to return something suitable for generating an
7528 indexed address, for machines that support that. */
7530 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7531 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7533 tree exp1
= TREE_OPERAND (exp
, 1);
7535 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7539 op0
= force_operand (op0
, NULL_RTX
);
7541 op0
= copy_to_mode_reg (mode
, op0
);
7543 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
7544 gen_int_mode (tree_low_cst (exp1
, 0),
7545 TYPE_MODE (TREE_TYPE (exp1
)))));
7548 if (modifier
== EXPAND_STACK_PARM
)
7551 /* Check for multiplying things that have been extended
7552 from a narrower type. If this machine supports multiplying
7553 in that narrower type with a result in the desired type,
7554 do it that way, and avoid the explicit type-conversion. */
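      /* For illustration only: this is the transformation that turns

	     long long r = (long long) a * (long long) b;

	 (with a and b of type int) into a single widening multiply pattern
	 (e.g. mulsidi3) instead of two sign extensions followed by a full
	 DImode multiply.  */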
7555 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7556 && TREE_CODE (type
) == INTEGER_TYPE
7557 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7558 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7559 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7560 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7561 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7562 /* Don't use a widening multiply if a shift will do. */
7563 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7564 > HOST_BITS_PER_WIDE_INT
)
7565 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7567 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7568 && (TYPE_PRECISION (TREE_TYPE
7569 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7570 == TYPE_PRECISION (TREE_TYPE
7572 (TREE_OPERAND (exp
, 0), 0))))
7573 /* If both operands are extended, they must either both
7574 be zero-extended or both be sign-extended. */
7575 && (TYPE_UNSIGNED (TREE_TYPE
7576 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7577 == TYPE_UNSIGNED (TREE_TYPE
7579 (TREE_OPERAND (exp
, 0), 0)))))))
7581 tree op0type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0));
7582 enum machine_mode innermode
= TYPE_MODE (op0type
);
7583 bool zextend_p
= TYPE_UNSIGNED (op0type
);
7584 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
7585 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
7587 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7589 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7591 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7592 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7593 TREE_OPERAND (exp
, 1),
7594 NULL_RTX
, &op0
, &op1
, 0);
7596 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7597 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7598 NULL_RTX
, &op0
, &op1
, 0);
7601 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7602 && innermode
== word_mode
)
7605 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7606 NULL_RTX
, VOIDmode
, 0);
7607 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7608 op1
= convert_modes (innermode
, mode
,
7609 expand_expr (TREE_OPERAND (exp
, 1),
7610 NULL_RTX
, VOIDmode
, 0),
7613 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7614 NULL_RTX
, VOIDmode
, 0);
7615 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7616 unsignedp
, OPTAB_LIB_WIDEN
);
7617 hipart
= gen_highpart (innermode
, temp
);
7618 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
7622 emit_move_insn (hipart
, htem
);
7623 return REDUCE_BIT_FIELD (temp
);
7627 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7628 subtarget
, &op0
, &op1
, 0);
7629 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
7631 case TRUNC_DIV_EXPR
:
7632 case FLOOR_DIV_EXPR
:
7634 case ROUND_DIV_EXPR
:
7635 case EXACT_DIV_EXPR
:
7636 if (modifier
== EXPAND_STACK_PARM
)
7638 /* Possible optimization: compute the dividend with EXPAND_SUM
7639 then if the divisor is constant can optimize the case
7640 where some terms of the dividend have coeffs divisible by it. */
7641 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7642 subtarget
, &op0
, &op1
, 0);
7643 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
      /* Emit a/b as a*(1/b).  Later, CSE may be able to share the
	 reciprocal, saving an expensive divide.  If not, combine will
	 rebuild the original computation.  */
7649 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
7650 && TREE_CODE (type
) == REAL_TYPE
7651 && !real_onep (TREE_OPERAND (exp
, 0)))
7652 return expand_expr (build2 (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
7653 build2 (RDIV_EXPR
, type
,
7654 build_real (type
, dconst1
),
7655 TREE_OPERAND (exp
, 1))),
7656 target
, tmode
, modifier
);
7660 case TRUNC_MOD_EXPR
:
7661 case FLOOR_MOD_EXPR
:
7663 case ROUND_MOD_EXPR
:
7664 if (modifier
== EXPAND_STACK_PARM
)
7666 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7667 subtarget
, &op0
, &op1
, 0);
7668 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7670 case FIX_ROUND_EXPR
:
7671 case FIX_FLOOR_EXPR
:
7673 gcc_unreachable (); /* Not used for C. */
7675 case FIX_TRUNC_EXPR
:
7676 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7677 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7678 target
= gen_reg_rtx (mode
);
7679 expand_fix (target
, op0
, unsignedp
);
7683 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7684 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7685 target
= gen_reg_rtx (mode
);
7686 /* expand_float can't figure out what to do if FROM has VOIDmode.
7687 So give it the correct mode. With -O, cse will optimize this. */
7688 if (GET_MODE (op0
) == VOIDmode
)
7689 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7691 expand_float (target
, op0
,
7692 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7696 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7697 if (modifier
== EXPAND_STACK_PARM
)
7699 temp
= expand_unop (mode
,
7700 optab_for_tree_code (NEGATE_EXPR
, type
),
7703 return REDUCE_BIT_FIELD (temp
);
7706 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7707 if (modifier
== EXPAND_STACK_PARM
)
7710 /* ABS_EXPR is not valid for complex arguments. */
7711 gcc_assert (GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7712 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
);
7714 /* Unsigned abs is simply the operand. Testing here means we don't
7715 risk generating incorrect code below. */
7716 if (TYPE_UNSIGNED (type
))
7719 return expand_abs (mode
, op0
, target
, unsignedp
,
7720 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
7724 target
= original_target
;
7726 || modifier
== EXPAND_STACK_PARM
7727 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
7728 || GET_MODE (target
) != mode
7730 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
7731 target
= gen_reg_rtx (mode
);
7732 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7733 target
, &op0
, &op1
, 0);
7735 /* First try to do it with a special MIN or MAX instruction.
7736 If that does not win, use a conditional jump to select the proper
7738 this_optab
= optab_for_tree_code (code
, type
);
7739 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
7744 /* At this point, a MEM target is no longer useful; we will get better
7748 target
= gen_reg_rtx (mode
);
7750 /* If op1 was placed in target, swap op0 and op1. */
7751 if (target
!= op0
&& target
== op1
)
7759 emit_move_insn (target
, op0
);
7761 op0
= gen_label_rtx ();
7763 /* If this mode is an integer too wide to compare properly,
7764 compare word by word. Rely on cse to optimize constant cases. */
7765 if (GET_MODE_CLASS (mode
) == MODE_INT
7766 && ! can_compare_p (GE
, mode
, ccp_jump
))
7768 if (code
== MAX_EXPR
)
7769 do_jump_by_parts_greater_rtx (mode
, unsignedp
, target
, op1
,
7772 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op1
, target
,
7777 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
7778 unsignedp
, mode
, NULL_RTX
, NULL_RTX
, op0
);
7780 emit_move_insn (target
, op1
);
7785 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7786 if (modifier
== EXPAND_STACK_PARM
)
7788 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worthwhile.  */
7797 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7798 boolean values when we want in all cases to compute both of them. In
7799 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7800 as actual zero-or-1 values and then bitwise anding. In cases where
7801 there cannot be any side effects, better code would be made by
7802 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7803 how to recognize those cases. */
7805 case TRUTH_AND_EXPR
:
7806 code
= BIT_AND_EXPR
;
7811 code
= BIT_IOR_EXPR
;
7815 case TRUTH_XOR_EXPR
:
7816 code
= BIT_XOR_EXPR
;
7824 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7826 if (modifier
== EXPAND_STACK_PARM
)
7828 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7829 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
7832 /* Could determine the answer when only additive constants differ. Also,
7833 the addition of one can be handled by changing the condition. */
7840 case UNORDERED_EXPR
:
7848 temp
= do_store_flag (exp
,
7849 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
7850 tmode
!= VOIDmode
? tmode
: mode
, 0);
7854 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7855 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
7857 && REG_P (original_target
)
7858 && (GET_MODE (original_target
)
7859 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7861 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
7864 /* If temp is constant, we can just compute the result. */
7865 if (GET_CODE (temp
) == CONST_INT
)
7867 if (INTVAL (temp
) != 0)
7868 emit_move_insn (target
, const1_rtx
);
7870 emit_move_insn (target
, const0_rtx
);
7875 if (temp
!= original_target
)
7877 enum machine_mode mode1
= GET_MODE (temp
);
7878 if (mode1
== VOIDmode
)
7879 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
7881 temp
= copy_to_mode_reg (mode1
, temp
);
7884 op1
= gen_label_rtx ();
7885 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
7886 GET_MODE (temp
), unsignedp
, op1
);
7887 emit_move_insn (temp
, const1_rtx
);
7892 /* If no set-flag instruction, must generate a conditional store
7893 into a temporary variable. Drop through and handle this
7898 || modifier
== EXPAND_STACK_PARM
7899 || ! safe_from_p (target
, exp
, 1)
7900 /* Make sure we don't have a hard reg (such as function's return
7901 value) live across basic blocks, if not optimizing. */
7902 || (!optimize
&& REG_P (target
)
7903 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
7904 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7907 emit_move_insn (target
, const0_rtx
);
7909 op1
= gen_label_rtx ();
7910 jumpifnot (exp
, op1
);
7913 emit_move_insn (target
, const1_rtx
);
7916 return ignore
? const0_rtx
: target
;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;
    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;
    case COND_EXPR:
      /* If it's void, we don't need to worry about computing a value.  */
      if (VOID_TYPE_P (TREE_TYPE (exp)))
	{
	  tree pred = TREE_OPERAND (exp, 0);
	  tree then_ = TREE_OPERAND (exp, 1);
	  tree else_ = TREE_OPERAND (exp, 2);

	  gcc_assert (TREE_CODE (then_) == GOTO_EXPR
		      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
		      && TREE_CODE (else_) == GOTO_EXPR
		      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);

	  jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
	  return expand_expr (else_, const0_rtx, VOIDmode, 0);
	}
      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_label (op1);
      return temp;
    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;
    case MODIFY_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs);
	return const0_rtx;
      }
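
      /* Illustrative sketch (not part of the original source): the
	 single-bit-field special case above turns source like

	     struct s { unsigned a : 1; unsigned b : 1; } x;
	     x.a |= x.b;

	 into, roughly,

	     if (x.b) x.a = 1;

	 (and "x.a &= x.b;" into "if (!x.b) x.a = 0;"), replacing a
	 read-modify-write of the destination bit-field with a test and a
	 plain constant assignment.  */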
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
      /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }
    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;
    case TRY_CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case CASE_LABEL_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);
    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
  this_optab = optab_for_tree_code (code, type);
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
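
/* Illustrative sketch (not part of the original source): for a bit-field
   type with TYPE_PRECISION 5 held in a 32-bit register,
   reduce_to_bit_field_precision behaves roughly like

       unsigned case:  exp & 0x1f                       (mask to 5 bits)
       signed case:    (exp << 27) >> 27  (arithmetic)  (sign-extend bit 4)

   i.e. an AND with (1 << prec) - 1 for unsigned types, and a left shift
   followed by an arithmetic right shift for signed types.  */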
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
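
/* Illustrative sketch (not part of the original source): the pattern this
   recognizes is the usual over-alignment idiom; for EXP == buf the offset
   tree corresponds to source like

       offset = (-(intptr_t) &buf) & (ALIGN - 1);

   where ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT.  Adding such
   an offset to &buf rounds the address up to an ALIGN boundary, so the
   result is known to be at least ALIGN-aligned.  "intptr_t" and "ALIGN"
   are just illustrative names here, not identifiers used by this file.  */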
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* If they are read-only, non-volatile and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
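
/* Illustrative sketch (not part of the original source): given the address
   expression for something like

       const char *p = "hello" + 2;

   string_constant returns the STRING_CST for "hello" and sets *PTR_OFFSET
   to a sizetype constant 2, so builtin expanders (strlen, strcpy, ...) can
   fold the access.  For a VAR_DECL such as

       static const char msg[6] = "hello";

   the string initializer is returned instead, provided the variable is
   read-only, binds locally and is at least as large as its initializer.  */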
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
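
  /* Illustrative sketch (not part of the original source): the single-bit
     case above means that for source such as

	 flag = (x & 8) != 0;

     fold_single_bit_test rewrites the test into shift-and-mask form,
     roughly

	 flag = (x >> 3) & 1;

     (with an extra "^ 1" when the original condition was ==), so no scc
     instruction or branch is needed.  */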
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */

  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
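
/* Illustrative sketch (not part of the original source): the set/jump/set
   fallback above emits, for "target = (op0 CODE op1)" with invert == 0,
   roughly

       target = 1;
       compare op0, op1;
       branch-if-CODE L;     <- (*bcc_gen_fctn[code]) (label)
       target = 0;
     L:

   With invert != 0 the two constants are swapped, which is how a
   TRUTH_NOT_EXPR of the comparison is handled without re-evaluating it.  */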
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
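
/* Illustrative sketch (not part of the original source): because the lower
   bound has already been subtracted, the single unsigned comparison above
   implements both range checks at once; for a switch over [low, high] it
   corresponds to

       if ((unsigned) (i - low) > (unsigned) (high - low))
	 goto default_label;

   and the dispatch address is then formed as

       table_label + (i - low) * GET_MODE_SIZE (CASE_VECTOR_MODE)

   before loading the table entry and jumping through it.  */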
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
			     convert (index_type, index_expr),
			     convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
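
/* Illustrative sketch (not part of the original source): on a target with
   64-bit scalar support but no vector unit, vector_mode_valid_p (V2DImode)
   still returns nonzero, because a V2DI value can be emulated as a pair of
   DImode operations; only the inner (scalar) mode has to be supported.  */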
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"