/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int, unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
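
/* A minimal sketch of how these heuristics get consulted (illustrative
   only; SIZE and ALIGN stand for a compile-time byte count and an
   alignment in bits taken from the MEMs involved):

     if (GET_CODE (size) == CONST_INT
	 && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   i.e. a block copy is open-coded only when the estimated number of
   piecewise move insns stays below the target's MOVE_RATIO.  */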
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	       && (direct_load[(int) mode] == 0
		   || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
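
/* For instance (an illustrative sketch, not taken from real callers):
   assuming TO and FROM are pseudo registers created elsewhere,

     rtx to = gen_reg_rtx (SImode);
     rtx from = gen_reg_rtx (QImode);
     convert_move (to, from, 1);

   emits a zero-extension from QImode to SImode, while passing 0 for
   UNSIGNEDP would request a sign-extension instead.  */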
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
338 if (to_real
!= from_real
)
341 /* If the source and destination are already the same, then there's
346 /* If FROM is a SUBREG that indicates that we have already done at least
347 the required extension, strip it. We don't handle such SUBREGs as
350 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
351 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
352 >= GET_MODE_SIZE (to_mode
))
353 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
354 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
356 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
359 if (to_mode
== from_mode
360 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
362 emit_move_insn (to
, from
);
366 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
368 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
371 if (VECTOR_MODE_P (to_mode
))
372 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
374 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
376 emit_move_insn (to
, from
);
380 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
382 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
383 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
392 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
394 else if (GET_MODE_PRECISION (from_mode
) > GET_MODE_PRECISION (to_mode
))
399 /* Try converting directly if the insn is supported. */
401 code
= tab
->handlers
[to_mode
][from_mode
].insn_code
;
402 if (code
!= CODE_FOR_nothing
)
404 emit_unop_insn (code
, to
, from
,
405 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
409 /* Otherwise use a libcall. */
410 libcall
= tab
->handlers
[to_mode
][from_mode
].libfunc
;
413 /* This conversion is not implemented yet. */
417 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
419 insns
= get_insns ();
421 emit_libcall_block (insns
, to
, value
,
422 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
424 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
428 /* Handle pointer conversion. */ /* SPEE 900220. */
429 /* Targets are expected to provide conversion insns between PxImode and
430 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
431 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
433 enum machine_mode full_mode
434 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
436 if (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
440 if (full_mode
!= from_mode
)
441 from
= convert_to_mode (full_mode
, from
, unsignedp
);
442 emit_unop_insn (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
,
446 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
448 enum machine_mode full_mode
449 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
451 if (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
455 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
457 if (to_mode
== full_mode
)
460 /* else proceed to integer conversions below. */
461 from_mode
= full_mode
;
464 /* Now both modes are integers. */
466 /* Handle expanding beyond a word. */
467 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
468 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
475 enum machine_mode lowpart_mode
;
476 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
478 /* Try converting directly if the insn is supported. */
479 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
482 /* If FROM is a SUBREG, put it into a register. Do this
483 so that we always generate the same set of insns for
484 better cse'ing; if an intermediate assignment occurred,
485 we won't be doing the operation directly on the SUBREG. */
486 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
487 from
= force_reg (from_mode
, from
);
488 emit_unop_insn (code
, to
, from
, equiv_code
);
491 /* Next, try converting via full word. */
492 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
493 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
494 != CODE_FOR_nothing
))
498 if (reg_overlap_mentioned_p (to
, from
))
499 from
= force_reg (from_mode
, from
);
500 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
502 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
503 emit_unop_insn (code
, to
,
504 gen_lowpart (word_mode
, to
), equiv_code
);
508 /* No special multiword conversion insn; do it by hand. */
511 /* Since we will turn this into a no conflict block, we must ensure
512 that the source does not overlap the target. */
514 if (reg_overlap_mentioned_p (to
, from
))
515 from
= force_reg (from_mode
, from
);
517 /* Get a copy of FROM widened to a word, if necessary. */
518 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
519 lowpart_mode
= word_mode
;
521 lowpart_mode
= from_mode
;
523 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
525 lowpart
= gen_lowpart (lowpart_mode
, to
);
526 emit_move_insn (lowpart
, lowfrom
);
528 /* Compute the value to put in each remaining word. */
530 fill_value
= const0_rtx
;
535 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
536 && STORE_FLAG_VALUE
== -1)
538 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
540 fill_value
= gen_reg_rtx (word_mode
);
541 emit_insn (gen_slt (fill_value
));
547 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
548 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
550 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
554 /* Fill the remaining words. */
555 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
557 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
558 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
563 if (fill_value
!= subword
)
564 emit_move_insn (subword
, fill_value
);
567 insns
= get_insns ();
570 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
571 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
575 /* Truncating multi-word to a word or less. */
576 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
577 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
580 && ! MEM_VOLATILE_P (from
)
581 && direct_load
[(int) to_mode
]
582 && ! mode_dependent_address_p (XEXP (from
, 0)))
584 || GET_CODE (from
) == SUBREG
))
585 from
= force_reg (from_mode
, from
);
586 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
590 /* Now follow all the conversions between integers
591 no more than a word long. */
593 /* For truncation, usually we can just refer to FROM in a narrower mode. */
594 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
595 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
596 GET_MODE_BITSIZE (from_mode
)))
599 && ! MEM_VOLATILE_P (from
)
600 && direct_load
[(int) to_mode
]
601 && ! mode_dependent_address_p (XEXP (from
, 0)))
603 || GET_CODE (from
) == SUBREG
))
604 from
= force_reg (from_mode
, from
);
605 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
606 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
607 from
= copy_to_reg (from
);
608 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
612 /* Handle extension. */
613 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
615 /* Convert directly if that works. */
616 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
620 from
= force_not_mem (from
);
622 emit_unop_insn (code
, to
, from
, equiv_code
);
627 enum machine_mode intermediate
;
631 /* Search for a mode to convert via. */
632 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
633 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
634 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
636 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
637 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
638 GET_MODE_BITSIZE (intermediate
))))
639 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
640 != CODE_FOR_nothing
))
642 convert_move (to
, convert_to_mode (intermediate
, from
,
643 unsignedp
), unsignedp
);
647 /* No suitable intermediate mode.
648 Generate what we need with shifts. */
649 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
650 - GET_MODE_BITSIZE (from_mode
), 0);
651 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
652 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
654 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
657 emit_move_insn (to
, tmp
);
662 /* Support special truncate insns for certain modes. */
663 if (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
!= CODE_FOR_nothing
)
665 emit_unop_insn (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
,
670 /* Handle truncation of volatile memrefs, and so on;
671 the things that couldn't be truncated directly,
672 and for which there was no special instruction.
674 ??? Code above formerly short-circuited this, for most integer
675 mode pairs, with a force_reg in from_mode followed by a recursive
676 call to this routine. Appears always to have been wrong. */
677 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
679 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
680 emit_move_insn (to
, temp
);
684 /* Mode combination is not recognized. */

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
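
/* A small usage sketch (illustrative only):

     rtx wide = convert_modes (DImode, SImode, GEN_INT (-1), 1);

   asks for an unsigned SImode -1 reinterpreted in DImode; the CONST_INT
   handling below therefore zero-extends the value instead of letting
   gen_lowpart sign-extend it.  */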
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode,
	       rtx x, int unsignedp)
{
716 /* If FROM is a SUBREG that indicates that we have already done at least
717 the required extension, strip it. */
719 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
720 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
721 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
722 x
= gen_lowpart (mode
, x
);
724 if (GET_MODE (x
) != VOIDmode
)
725 oldmode
= GET_MODE (x
);
730 /* There is one case that we must handle specially: If we are converting
731 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
732 we are to interpret the constant as unsigned, gen_lowpart will do
733 the wrong if the constant appears negative. What we want to do is
734 make the high-order word of the constant zero, not all ones. */
736 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
737 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
738 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
740 HOST_WIDE_INT val
= INTVAL (x
);
742 if (oldmode
!= VOIDmode
743 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
745 int width
= GET_MODE_BITSIZE (oldmode
);
747 /* We need to zero extend VAL. */
748 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
751 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
754 /* We can do this with a gen_lowpart if both desired and current modes
755 are integer, and this is either a constant integer, a register, or a
756 non-volatile MEM. Except for the constant case where MODE is no
757 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
759 if ((GET_CODE (x
) == CONST_INT
760 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
761 || (GET_MODE_CLASS (mode
) == MODE_INT
762 && GET_MODE_CLASS (oldmode
) == MODE_INT
763 && (GET_CODE (x
) == CONST_DOUBLE
764 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
765 && ((MEM_P (x
) && ! MEM_VOLATILE_P (x
)
766 && direct_load
[(int) mode
])
768 && (! HARD_REGISTER_P (x
)
769 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
770 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
771 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
773 /* ?? If we don't know OLDMODE, we have to assume here that
774 X does not need sign- or zero-extension. This may not be
775 the case, but it's the best we can do. */
776 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
777 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
779 HOST_WIDE_INT val
= INTVAL (x
);
780 int width
= GET_MODE_BITSIZE (oldmode
);
782 /* We must sign or zero-extend in this case. Start by
783 zero-extending, then sign extend if we need to. */
784 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
786 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
787 val
|= (HOST_WIDE_INT
) (-1) << width
;
789 return gen_int_mode (val
, mode
);
792 return gen_lowpart (mode
, x
);
795 /* Converting from integer constant into mode is always equivalent to an
797 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
799 if (GET_MODE_BITSIZE (mode
) != GET_MODE_BITSIZE (oldmode
))
801 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
804 temp
= gen_reg_rtx (mode
);
805 convert_move (temp
, x
, unsignedp
);

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
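
/* A minimal usage sketch (illustrative; X and Y are assumed to be
   BLKmode MEMs wrapping the two buffers):

     if (can_move_by_pieces (nbytes, align))
       move_by_pieces (x, y, nbytes, align, 0);

   with ENDP == 0 the destination block itself, rather than a pointer
   past the copied bytes, is what the caller gets back.  */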
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
843 struct move_by_pieces data
;
844 rtx to_addr
, from_addr
= XEXP (from
, 0);
845 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
846 enum machine_mode mode
= VOIDmode
, tmode
;
847 enum insn_code icode
;
849 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
852 data
.from_addr
= from_addr
;
855 to_addr
= XEXP (to
, 0);
858 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
859 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
861 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
868 #ifdef STACK_GROWS_DOWNWARD
874 data
.to_addr
= to_addr
;
877 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
878 || GET_CODE (from_addr
) == POST_INC
879 || GET_CODE (from_addr
) == POST_DEC
);
881 data
.explicit_inc_from
= 0;
882 data
.explicit_inc_to
= 0;
883 if (data
.reverse
) data
.offset
= len
;
886 /* If copying requires more than two move insns,
887 copy addresses to registers (to make displacements shorter)
888 and use post-increment if available. */
889 if (!(data
.autinc_from
&& data
.autinc_to
)
890 && move_by_pieces_ninsns (len
, align
, max_size
) > 2)
892 /* Find the mode of the largest move... */
893 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
894 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
895 if (GET_MODE_SIZE (tmode
) < max_size
)
898 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
900 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
901 data
.autinc_from
= 1;
902 data
.explicit_inc_from
= -1;
904 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
906 data
.from_addr
= copy_addr_to_reg (from_addr
);
907 data
.autinc_from
= 1;
908 data
.explicit_inc_from
= 1;
910 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
911 data
.from_addr
= copy_addr_to_reg (from_addr
);
912 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
914 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
916 data
.explicit_inc_to
= -1;
918 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
920 data
.to_addr
= copy_addr_to_reg (to_addr
);
922 data
.explicit_inc_to
= 1;
924 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
925 data
.to_addr
= copy_addr_to_reg (to_addr
);
928 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
929 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
930 align
= MOVE_MAX
* BITS_PER_UNIT
;
932 /* First move what we can in the largest integer mode, then go to
933 successively smaller modes. */
937 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
938 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
939 if (GET_MODE_SIZE (tmode
) < max_size
)
942 if (mode
== VOIDmode
)
945 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
946 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
947 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
949 max_size
= GET_MODE_SIZE (mode
);
952 /* The code above should have handled everything. */
966 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
967 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
969 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
972 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
979 to1
= adjust_address (data
.to
, QImode
, data
.offset
);

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
1033 unsigned int size
= GET_MODE_SIZE (mode
);
1034 rtx to1
= NULL_RTX
, from1
;
1036 while (data
->len
>= size
)
1039 data
->offset
-= size
;
1043 if (data
->autinc_to
)
1044 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1047 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1050 if (data
->autinc_from
)
1051 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1054 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1056 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1057 emit_insn (gen_add2_insn (data
->to_addr
,
1058 GEN_INT (-(HOST_WIDE_INT
)size
)));
1059 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1060 emit_insn (gen_add2_insn (data
->from_addr
,
1061 GEN_INT (-(HOST_WIDE_INT
)size
)));
1064 emit_insn ((*genfun
) (to1
, from1
));
1067 #ifdef PUSH_ROUNDING
1068 emit_single_push_insn (mode
, from1
, NULL
);
1074 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1075 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1076 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1077 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1079 if (! data
->reverse
)
1080 data
->offset
+= size
;

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
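
/* Typical use (an illustrative sketch; DEST and SRC stand for BLKmode
   MEMs built by the caller, and SIZE for an rtx byte count):

     rtx ret = emit_block_move (dest, src, size, BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM should be used instead when the copy feeds an
   outgoing argument, so that a memcpy libcall cannot clobber argument
   slots already stored on the stack.  */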
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
1107 case BLOCK_OP_NORMAL
:
1108 may_use_call
= true;
1111 case BLOCK_OP_CALL_PARM
:
1112 may_use_call
= block_move_libcall_safe_for_call_parm ();
1114 /* Make inhibit_defer_pop nonzero around the library call
1115 to force it to pop the arguments right away. */
1119 case BLOCK_OP_NO_LIBCALL
:
1120 may_use_call
= false;
1127 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1136 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1137 block copy is more efficient for other large modes, e.g. DCmode. */
1138 x
= adjust_address (x
, BLKmode
, 0);
1139 y
= adjust_address (y
, BLKmode
, 0);
1141 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1142 can be incorrect is coming from __builtin_memcpy. */
1143 if (GET_CODE (size
) == CONST_INT
)
1145 if (INTVAL (size
) == 0)
1148 x
= shallow_copy_rtx (x
);
1149 y
= shallow_copy_rtx (y
);
1150 set_mem_size (x
, size
);
1151 set_mem_size (y
, size
);
1154 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1155 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1156 else if (emit_block_move_via_movmem (x
, y
, size
, align
))
1158 else if (may_use_call
)
1159 retval
= emit_block_move_via_libcall (x
, y
, size
);
1161 emit_block_move_via_loop (x
, y
, size
, align
);
1163 if (method
== BLOCK_OP_CALL_PARM
)

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
1180 /* If registers go on the stack anyway, any argument is sure to clobber
1181 an outgoing argument. */
1182 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1184 tree fn
= emit_block_move_libcall_fn (false);
1186 if (REG_PARM_STACK_SPACE (fn
) != 0)
1191 /* If any argument goes in memory, then it might clobber an outgoing
1194 CUMULATIVE_ARGS args_so_far
;
1197 fn
= emit_block_move_libcall_fn (false);
1198 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1200 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1201 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1203 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1204 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1205 if (!tmp
|| !REG_P (tmp
))
1207 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1210 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
1222 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1223 int save_volatile_ok
= volatile_ok
;
1224 enum machine_mode mode
;
1226 /* Since this is a move insn, we don't care about volatility. */
1229 /* Try the most limited insn first, because there's no point
1230 including more than one in the machine description unless
1231 the more limited one has some advantage. */
1233 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1234 mode
= GET_MODE_WIDER_MODE (mode
))
1236 enum insn_code code
= movmem_optab
[(int) mode
];
1237 insn_operand_predicate_fn pred
;
1239 if (code
!= CODE_FOR_nothing
1240 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1241 here because if SIZE is less than the mode mask, as it is
1242 returned by the macro, it will definitely be less than the
1243 actual mode mask. */
1244 && ((GET_CODE (size
) == CONST_INT
1245 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1246 <= (GET_MODE_MASK (mode
) >> 1)))
1247 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1248 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1249 || (*pred
) (x
, BLKmode
))
1250 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1251 || (*pred
) (y
, BLKmode
))
1252 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1253 || (*pred
) (opalign
, VOIDmode
)))
1256 rtx last
= get_last_insn ();
1259 op2
= convert_to_mode (mode
, size
, 1);
1260 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1261 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1262 op2
= copy_to_mode_reg (mode
, op2
);
1264 /* ??? When called via emit_block_move_for_call, it'd be
1265 nice if there were some way to inform the backend, so
1266 that it doesn't fail the expansion because it thinks
1267 emitting the libcall would be more efficient. */
1269 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1273 volatile_ok
= save_volatile_ok
;
1277 delete_insns_since (last
);
1281 volatile_ok
= save_volatile_ok
;

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
1291 rtx dst_addr
, src_addr
;
1292 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1293 enum machine_mode size_mode
;
1296 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1297 pseudos. We can then place those new pseudos into a VAR_DECL and
1300 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1301 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1303 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1304 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1306 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1307 src_tree
= make_tree (ptr_type_node
, src_addr
);
1309 size_mode
= TYPE_MODE (sizetype
);
1311 size
= convert_to_mode (size_mode
, size
, 1);
1312 size
= copy_to_mode_reg (size_mode
, size
);
1314 /* It is incorrect to use the libcall calling conventions to call
1315 memcpy in this context. This could be a user call to memcpy and
1316 the user may wish to examine the return value from memcpy. For
1317 targets where libcalls and normal calls have different conventions
1318 for returning pointers, we could end up generating incorrect code. */
1320 size_tree
= make_tree (sizetype
, size
);
1322 fn
= emit_block_move_libcall_fn (true);
1323 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1324 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1325 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1327 /* Now we have to build up the CALL_EXPR itself. */
1328 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1329 call_expr
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1330 call_expr
, arg_list
, NULL_TREE
);
1332 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1334 /* If we are initializing a readonly value, show the above call clobbered
1335 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1336 the delay slot scheduler might overlook conflicts and take nasty
1338 if (RTX_UNCHANGING_P (dst
))
1339 add_function_usage_to
1340 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode
,
1341 gen_rtx_CLOBBER (VOIDmode
, dst
),

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
1450 #ifdef HAVE_load_multiple
1458 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1459 x
= validize_mem (force_const_mem (mode
, x
));
1461 /* See if the machine can do this with a load multiple insn. */
1462 #ifdef HAVE_load_multiple
1463 if (HAVE_load_multiple
)
1465 last
= get_last_insn ();
1466 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1474 delete_insns_since (last
);
1478 for (i
= 0; i
< nregs
; i
++)
1479 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1480 operand_subword_force (x
, i
, mode
));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
1494 /* See if the machine can do this with a store multiple insn. */
1495 #ifdef HAVE_store_multiple
1496 if (HAVE_store_multiple
)
1498 rtx last
= get_last_insn ();
1499 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1507 delete_insns_since (last
);
1511 for (i
= 0; i
< nregs
; i
++)
1513 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1518 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
1534 if (GET_CODE (orig
) != PARALLEL
)
1537 length
= XVECLEN (orig
, 0);
1538 tmps
= alloca (sizeof (rtx
) * length
);
1540 /* Skip a NULL entry in first slot. */
1541 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1546 for (; i
< length
; i
++)
1548 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1549 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1551 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1554 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));

/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */
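
/* As an illustrative sketch (hypothetical two-register return value):
   a PARALLEL such as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DI 1) (const_int 8))])

   describes a 16-byte block split across two registers, and

     emit_group_load (dst_parallel, src_mem, type, 16);

   loads each piece out of SRC_MEM at the byte offsets recorded in the
   EXPR_LISTs.  */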
void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
1568 if (GET_CODE (dst
) != PARALLEL
)
1571 /* Check for a NULL entry, used to indicate that the parameter goes
1572 both on the stack and in registers. */
1573 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1578 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1580 /* Process the pieces. */
1581 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1583 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1584 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1585 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1588 /* Handle trailing fragments that run over the size of the struct. */
1589 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1591 /* Arrange to shift the fragment to where it belongs.
1592 extract_bit_field loads to the lsb of the reg. */
1594 #ifdef BLOCK_REG_PADDING
1595 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1596 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1601 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1602 bytelen
= ssize
- bytepos
;
1607 /* If we won't be loading directly from memory, protect the real source
1608 from strange tricks we might play; but make sure that the source can
1609 be loaded directly into the destination. */
1611 if (!MEM_P (orig_src
)
1612 && (!CONSTANT_P (orig_src
)
1613 || (GET_MODE (orig_src
) != mode
1614 && GET_MODE (orig_src
) != VOIDmode
)))
1616 if (GET_MODE (orig_src
) == VOIDmode
)
1617 src
= gen_reg_rtx (mode
);
1619 src
= gen_reg_rtx (GET_MODE (orig_src
));
1621 emit_move_insn (src
, orig_src
);
1624 /* Optimize the access just a bit. */
1626 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1627 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1628 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1629 && bytelen
== GET_MODE_SIZE (mode
))
1631 tmps
[i
] = gen_reg_rtx (mode
);
1632 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1634 else if (GET_CODE (src
) == CONCAT
)
1636 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1637 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1639 if ((bytepos
== 0 && bytelen
== slen0
)
1640 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1642 /* The following assumes that the concatenated objects all
1643 have the same size. In this case, a simple calculation
1644 can be used to determine the object and the bit field
1646 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1647 if (! CONSTANT_P (tmps
[i
])
1648 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
1649 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1650 (bytepos
% slen0
) * BITS_PER_UNIT
,
1651 1, NULL_RTX
, mode
, mode
);
1653 else if (bytepos
== 0)
1655 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1656 emit_move_insn (mem
, src
);
1657 tmps
[i
] = adjust_address (mem
, mode
, 0);
1662 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1663 SIMD register, which is currently broken. While we get GCC
1664 to emit proper RTL for these cases, let's dump to memory. */
1665 else if (VECTOR_MODE_P (GET_MODE (dst
))
1668 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1671 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1672 emit_move_insn (mem
, src
);
1673 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1675 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1676 && XVECLEN (dst
, 0) > 1)
1677 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE(dst
), bytepos
);
1678 else if (CONSTANT_P (src
)
1679 || (REG_P (src
) && GET_MODE (src
) == mode
))
1682 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1683 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1687 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
1688 build_int_2 (shift
, 0), tmps
[i
], 0);
1691 /* Copy the extracted pieces into the proper (probable) hard regs. */
1692 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1693 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
1726 if (GET_CODE (src
) != PARALLEL
)
1729 /* Check for a NULL entry, used to indicate that the parameter goes
1730 both on the stack and in registers. */
1731 if (XEXP (XVECEXP (src
, 0, 0), 0))
1736 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
1738 /* Copy the (probable) hard regs into pseudos. */
1739 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1741 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1742 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1743 emit_move_insn (tmps
[i
], reg
);
1746 /* If we won't be storing directly into memory, protect the real destination
1747 from strange tricks we might play. */
1749 if (GET_CODE (dst
) == PARALLEL
)
1753 /* We can get a PARALLEL dst if there is a conditional expression in
1754 a return statement. In that case, the dst and src are the same,
1755 so no action is necessary. */
1756 if (rtx_equal_p (dst
, src
))
1759 /* It is unclear if we can ever reach here, but we may as well handle
1760 it. Allocate a temporary, and split this into a store/load to/from
1763 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
1764 emit_group_store (temp
, src
, type
, ssize
);
1765 emit_group_load (dst
, temp
, type
, ssize
);
1768 else if (!MEM_P (dst
) && GET_CODE (dst
) != CONCAT
)
1770 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
1771 /* Make life a bit easier for combine. */
1772 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
1775 /* Process the pieces. */
1776 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1778 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
1779 enum machine_mode mode
= GET_MODE (tmps
[i
]);
1780 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1783 /* Handle trailing fragments that run over the size of the struct. */
1784 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1786 /* store_bit_field always takes its value from the lsb.
1787 Move the fragment to the lsb if it's not already there. */
1789 #ifdef BLOCK_REG_PADDING
1790 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
1791 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1797 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1798 tmps
[i
] = expand_shift (RSHIFT_EXPR
, mode
, tmps
[i
],
1799 build_int_2 (shift
, 0), tmps
[i
], 0);
1801 bytelen
= ssize
- bytepos
;
1804 if (GET_CODE (dst
) == CONCAT
)
1806 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
1807 dest
= XEXP (dst
, 0);
1808 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
1810 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
1811 dest
= XEXP (dst
, 1);
1813 else if (bytepos
== 0 && XVECLEN (src
, 0))
1815 dest
= assign_stack_temp (GET_MODE (dest
),
1816 GET_MODE_SIZE (GET_MODE (dest
)), 0);
1817 emit_move_insn (adjust_address (dest
, GET_MODE (tmps
[i
]), bytepos
),
1826 /* Optimize the access just a bit. */
1828 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
1829 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
1830 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1831 && bytelen
== GET_MODE_SIZE (mode
))
1832 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
1834 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
1838 /* Copy from the pseudo into the (probable) hard reg. */
1839 if (orig_dst
!= dst
)
1840 emit_move_insn (orig_dst
, dst
);

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */
rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
1855 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
1856 rtx src
= NULL
, dst
= NULL
;
1857 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
1858 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
1862 tgtblk
= assign_temp (build_qualified_type (type
,
1864 | TYPE_QUAL_CONST
)),
1866 preserve_temp_slots (tgtblk
);
1869 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1870 into a new pseudo which is a full word. */
1872 if (GET_MODE (srcreg
) != BLKmode
1873 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
1874 srcreg
= convert_to_mode (word_mode
, srcreg
, TYPE_UNSIGNED (type
));
1876 /* If the structure doesn't take up a whole number of words, see whether
1877 SRCREG is padded on the left or on the right. If it's on the left,
1878 set PADDING_CORRECTION to the number of bits to skip.
1880 In most ABIs, the structure will be returned at the least end of
1881 the register, which translates to right padding on little-endian
1882 targets and left padding on big-endian targets. The opposite
1883 holds if the structure is returned at the most significant
1884 end of the register. */
1885 if (bytes
% UNITS_PER_WORD
!= 0
1886 && (targetm
.calls
.return_in_msb (type
)
1888 : BYTES_BIG_ENDIAN
))
1890 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
1892 /* Copy the structure BITSIZE bites at a time.
1894 We could probably emit more efficient code for machines which do not use
1895 strict alignment, but it doesn't seem worth the effort at the current
1897 for (bitpos
= 0, xbitpos
= padding_correction
;
1898 bitpos
< bytes
* BITS_PER_UNIT
;
1899 bitpos
+= bitsize
, xbitpos
+= bitsize
)
1901 /* We need a new source operand each time xbitpos is on a
1902 word boundary and when xbitpos == padding_correction
1903 (the first time through). */
1904 if (xbitpos
% BITS_PER_WORD
== 0
1905 || xbitpos
== padding_correction
)
1906 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
1909 /* We need a new destination operand each time bitpos is on
1911 if (bitpos
% BITS_PER_WORD
== 0)
1912 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
1914 /* Use xbitpos for the source extraction (right justified) and
1915 xbitpos for the destination store (left justified). */
1916 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
1917 extract_bit_field (src
, bitsize
,
1918 xbitpos
% BITS_PER_WORD
, 1,
1919 NULL_RTX
, word_mode
, word_mode
));

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */
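
/* For example (a hypothetical sketch for a target whose first argument
   register is hard register 0):

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 0));

   records that the upcoming call uses that hard register, so the insns
   that set up the argument are not treated as dead stores.  */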
void
use_reg (rtx *call_fusage, rtx reg)
{
  if (!REG_P (reg)
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}

/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */
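
/* A usage sketch (illustrative; builtin_memset_read_str is only an
   example of a CONSTFUN, and C is assumed to hold the fill byte):

     if (can_store_by_pieces (len, builtin_memset_read_str, &c, align))
       store_by_pieces (dest_mem, len, builtin_memset_read_str, &c, align, 0);

   CONSTFUN is called back with CONSTFUNDATA, a byte offset and a mode,
   and must return an rtx for the constant to store at that offset.  */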
int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align)
{
1988 unsigned HOST_WIDE_INT l
;
1989 unsigned int max_size
;
1990 HOST_WIDE_INT offset
= 0;
1991 enum machine_mode mode
, tmode
;
1992 enum insn_code icode
;
1999 if (! STORE_BY_PIECES_P (len
, align
))
2002 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2003 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2004 align
= MOVE_MAX
* BITS_PER_UNIT
;
2006 /* We would first store what we can in the largest integer mode, then go to
2007 successively smaller modes. */
2010 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2015 max_size
= STORE_MAX_PIECES
+ 1;
2016 while (max_size
> 1)
2018 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2019 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2020 if (GET_MODE_SIZE (tmode
) < max_size
)
2023 if (mode
== VOIDmode
)
2026 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2027 if (icode
!= CODE_FOR_nothing
2028 && align
>= GET_MODE_ALIGNMENT (mode
))
2030 unsigned int size
= GET_MODE_SIZE (mode
);
2037 cst
= (*constfun
) (constfundata
, offset
, mode
);
2038 if (!LEGITIMATE_CONSTANT_P (cst
))
2048 max_size
= GET_MODE_SIZE (mode
);
2051 /* The code above should have handled everything. */

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, int endp)
{
2072 struct store_by_pieces data
;
2081 if (! STORE_BY_PIECES_P (len
, align
))
2083 data
.constfun
= constfun
;
2084 data
.constfundata
= constfundata
;
2087 store_by_pieces_1 (&data
, align
);
2098 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2099 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2101 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
2104 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2111 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
		   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size)
{
  rtx retval = 0;
  unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
			: GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      if (size == const0_rtx)
	;
      else if (GET_CODE (size) == CONST_INT
	       && CLEAR_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrmem (object, size, align))
	;
      else
	retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
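/* Illustrative sketch (not from this file) of the cascade above: a small
   BLKmode slot with a constant size is normally cleared inline by
   clear_by_pieces; only larger or variable sizes fall through to a
   clrmem pattern or the memset libcall.  The temporary and size below
   are hypothetical.  */
#if 0
  rtx slot = assign_stack_temp (BLKmode, 16, 0);
  clear_storage (slot, GEN_INT (16));
#endif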
/* A subroutine of clear_storage.  Expand a clrmem pattern;
   return true if successful.  */

static bool
clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than
	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
	     the mode mask, as it is returned by the macro, it will
	     definitely be less than the actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (object, BLKmode))
	  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op1;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op1 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[1].predicate;
	  if (pred != 0 && ! (*pred) (op1, mode))
	    op1 = copy_to_mode_reg (mode, op1);

	  pat = GEN_FCN ((int) code) (object, op1, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  return false;
}
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those pseudos into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  size_tree = make_tree (sizetype, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return retval;
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}
static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
2458 /* Generate code to copy Y into X.
2459 Both Y and X must have the same mode, except that
2460 Y can be a constant with VOIDmode.
2461 This mode cannot be BLKmode; use emit_block_move for that.
2463 Return the last instruction emitted. */
2466 emit_move_insn (rtx x
, rtx y
)
2468 enum machine_mode mode
= GET_MODE (x
);
2469 rtx y_cst
= NULL_RTX
;
2472 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2478 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
2479 && (last_insn
= compress_float_constant (x
, y
)))
2484 if (!LEGITIMATE_CONSTANT_P (y
))
2486 y
= force_const_mem (mode
, y
);
2488 /* If the target's cannot_force_const_mem prevented the spill,
2489 assume that the target's move expanders will also take care
2490 of the non-legitimate constant. */
2496 /* If X or Y are memory references, verify that their addresses are valid
2499 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2500 && ! push_operand (x
, GET_MODE (x
)))
2502 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2503 x
= validize_mem (x
);
2506 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2508 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2509 y
= validize_mem (y
);
2511 if (mode
== BLKmode
)
2514 last_insn
= emit_move_insn_1 (x
, y
);
2516 if (y_cst
&& REG_P (x
)
2517 && (set
= single_set (last_insn
)) != NULL_RTX
2518 && SET_DEST (set
) == x
2519 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
2520 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
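/* Illustrative sketch (not from this file): a typical use of
   emit_move_insn.  Loading a constant into a fresh pseudo emits a single
   move; if the constant is not LEGITIMATE_CONSTANT_P it is spilled to
   the constant pool first, and the REG_EQUAL note attached above lets
   later passes still see the original constant value.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
#endif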
2525 /* Low level part of emit_move_insn.
2526 Called just like emit_move_insn, but assumes X and Y
2527 are basically valid. */
2530 emit_move_insn_1 (rtx x
, rtx y
)
2532 enum machine_mode mode
= GET_MODE (x
);
2533 enum machine_mode submode
;
2534 enum mode_class
class = GET_MODE_CLASS (mode
);
2536 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2539 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2541 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2543 /* Expand complex moves by moving real part and imag part, if possible. */
2544 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2545 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
2546 && (mov_optab
->handlers
[(int) submode
].insn_code
2547 != CODE_FOR_nothing
))
2549 /* Don't split destination if it is a stack push. */
2550 int stack
= push_operand (x
, GET_MODE (x
));
2552 #ifdef PUSH_ROUNDING
2553 /* In case we output to the stack, but the size is smaller than the
2554 machine can push exactly, we need to use move instructions. */
2556 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
2557 != GET_MODE_SIZE (submode
)))
2560 HOST_WIDE_INT offset1
, offset2
;
2562 /* Do not use anti_adjust_stack, since we don't want to update
2563 stack_pointer_delta. */
2564 temp
= expand_binop (Pmode
,
2565 #ifdef STACK_GROWS_DOWNWARD
2573 (GET_MODE_SIZE (GET_MODE (x
)))),
2574 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2576 if (temp
!= stack_pointer_rtx
)
2577 emit_move_insn (stack_pointer_rtx
, temp
);
2579 #ifdef STACK_GROWS_DOWNWARD
2581 offset2
= GET_MODE_SIZE (submode
);
2583 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2584 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2585 + GET_MODE_SIZE (submode
));
2588 emit_move_insn (change_address (x
, submode
,
2589 gen_rtx_PLUS (Pmode
,
2591 GEN_INT (offset1
))),
2592 gen_realpart (submode
, y
));
2593 emit_move_insn (change_address (x
, submode
,
2594 gen_rtx_PLUS (Pmode
,
2596 GEN_INT (offset2
))),
2597 gen_imagpart (submode
, y
));
2601 /* If this is a stack, push the highpart first, so it
2602 will be in the argument order.
2604 In that case, change_address is used only to convert
2605 the mode, not to change the address. */
2608 /* Note that the real part always precedes the imag part in memory
2609 regardless of machine's endianness. */
2610 #ifdef STACK_GROWS_DOWNWARD
2611 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2612 gen_imagpart (submode
, y
));
2613 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2614 gen_realpart (submode
, y
));
2616 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2617 gen_realpart (submode
, y
));
2618 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2619 gen_imagpart (submode
, y
));
2624 rtx realpart_x
, realpart_y
;
2625 rtx imagpart_x
, imagpart_y
;
2627 /* If this is a complex value with each part being smaller than a
2628 word, the usual calling sequence will likely pack the pieces into
2629 a single register. Unfortunately, SUBREG of hard registers only
2630 deals in terms of words, so we have a problem converting input
2631 arguments to the CONCAT of two registers that is used elsewhere
2632 for complex values. If this is before reload, we can copy it into
2633 memory and reload. FIXME, we should see about using extract and
2634 insert on integer registers, but complex short and complex char
2635 variables should be rarely used. */
2636 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2637 && (reload_in_progress
| reload_completed
) == 0)
2640 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2642 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2644 if (packed_dest_p
|| packed_src_p
)
2646 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2647 ? MODE_FLOAT
: MODE_INT
);
2649 enum machine_mode reg_mode
2650 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2652 if (reg_mode
!= BLKmode
)
2654 rtx mem
= assign_stack_temp (reg_mode
,
2655 GET_MODE_SIZE (mode
), 0);
2656 rtx cmem
= adjust_address (mem
, mode
, 0);
2660 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2662 emit_move_insn_1 (cmem
, y
);
2663 return emit_move_insn_1 (sreg
, mem
);
2667 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2669 emit_move_insn_1 (mem
, sreg
);
2670 return emit_move_insn_1 (x
, cmem
);
2676 realpart_x
= gen_realpart (submode
, x
);
2677 realpart_y
= gen_realpart (submode
, y
);
2678 imagpart_x
= gen_imagpart (submode
, x
);
2679 imagpart_y
= gen_imagpart (submode
, y
);
2681 /* Show the output dies here. This is necessary for SUBREGs
2682 of pseudos since we cannot track their lifetimes correctly;
2683 hard regs shouldn't appear here except as return values.
2684 We never want to emit such a clobber after reload. */
2686 && ! (reload_in_progress
|| reload_completed
)
2687 && (GET_CODE (realpart_x
) == SUBREG
2688 || GET_CODE (imagpart_x
) == SUBREG
))
2689 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2691 emit_move_insn (realpart_x
, realpart_y
);
2692 emit_move_insn (imagpart_x
, imagpart_y
);
2695 return get_last_insn ();
2698 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2699 find a mode to do it in. If we have a movcc, use it. Otherwise,
2700 find the MODE_INT mode of the same width. */
2701 else if (GET_MODE_CLASS (mode
) == MODE_CC
2702 && mov_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2704 enum insn_code insn_code
;
2705 enum machine_mode tmode
= VOIDmode
;
2709 && mov_optab
->handlers
[(int) CCmode
].insn_code
!= CODE_FOR_nothing
)
2712 for (tmode
= QImode
; tmode
!= VOIDmode
;
2713 tmode
= GET_MODE_WIDER_MODE (tmode
))
2714 if (GET_MODE_SIZE (tmode
) == GET_MODE_SIZE (mode
))
2717 if (tmode
== VOIDmode
)
2720 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2721 may call change_address which is not appropriate if we were
2722 called when a reload was in progress. We don't have to worry
2723 about changing the address since the size in bytes is supposed to
2724 be the same. Copy the MEM to change the mode and move any
2725 substitutions from the old MEM to the new one. */
2727 if (reload_in_progress
)
2729 x
= gen_lowpart_common (tmode
, x1
);
2730 if (x
== 0 && MEM_P (x1
))
2732 x
= adjust_address_nv (x1
, tmode
, 0);
2733 copy_replacements (x1
, x
);
2736 y
= gen_lowpart_common (tmode
, y1
);
2737 if (y
== 0 && MEM_P (y1
))
2739 y
= adjust_address_nv (y1
, tmode
, 0);
2740 copy_replacements (y1
, y
);
2745 x
= gen_lowpart (tmode
, x
);
2746 y
= gen_lowpart (tmode
, y
);
2749 insn_code
= mov_optab
->handlers
[(int) tmode
].insn_code
;
2750 return emit_insn (GEN_FCN (insn_code
) (x
, y
));
2753 /* Try using a move pattern for the corresponding integer mode. This is
2754 only safe when simplify_subreg can convert MODE constants into integer
2755 constants. At present, it can only do this reliably if the value
2756 fits within a HOST_WIDE_INT. */
2757 else if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
2758 && (submode
= int_mode_for_mode (mode
)) != BLKmode
2759 && mov_optab
->handlers
[submode
].insn_code
!= CODE_FOR_nothing
)
2760 return emit_insn (GEN_FCN (mov_optab
->handlers
[submode
].insn_code
)
2761 (simplify_gen_subreg (submode
, x
, mode
, 0),
2762 simplify_gen_subreg (submode
, y
, mode
, 0)));
2764 /* This will handle any multi-word or full-word mode that lacks a move_insn
2765 pattern. However, you will get better code if you define such patterns,
2766 even if they must turn into multiple assembler instructions. */
2767 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
2774 #ifdef PUSH_ROUNDING
2776 /* If X is a push on the stack, do the push now and replace
2777 X with a reference to the stack pointer. */
2778 if (push_operand (x
, GET_MODE (x
)))
2783 /* Do not use anti_adjust_stack, since we don't want to update
2784 stack_pointer_delta. */
2785 temp
= expand_binop (Pmode
,
2786 #ifdef STACK_GROWS_DOWNWARD
2794 (GET_MODE_SIZE (GET_MODE (x
)))),
2795 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2797 if (temp
!= stack_pointer_rtx
)
2798 emit_move_insn (stack_pointer_rtx
, temp
);
2800 code
= GET_CODE (XEXP (x
, 0));
2802 /* Just hope that small offsets off SP are OK. */
2803 if (code
== POST_INC
)
2804 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
2805 GEN_INT (-((HOST_WIDE_INT
)
2806 GET_MODE_SIZE (GET_MODE (x
)))));
2807 else if (code
== POST_DEC
)
2808 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
2809 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2811 temp
= stack_pointer_rtx
;
2813 x
= change_address (x
, VOIDmode
, temp
);
2817 /* If we are in reload, see if either operand is a MEM whose address
2818 is scheduled for replacement. */
2819 if (reload_in_progress
&& MEM_P (x
)
2820 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
2821 x
= replace_equiv_address_nv (x
, inner
);
2822 if (reload_in_progress
&& MEM_P (y
)
2823 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
2824 y
= replace_equiv_address_nv (y
, inner
);
2830 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2833 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2834 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2836 /* If we can't get a part of Y, put Y into memory if it is a
2837 constant. Otherwise, force it into a register. If we still
2838 can't get a part of Y, abort. */
2839 if (ypart
== 0 && CONSTANT_P (y
))
2841 y
= force_const_mem (mode
, y
);
2842 ypart
= operand_subword (y
, i
, 1, mode
);
2844 else if (ypart
== 0)
2845 ypart
= operand_subword_force (y
, i
, mode
);
2847 if (xpart
== 0 || ypart
== 0)
2850 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
2852 last_insn
= emit_move_insn (xpart
, ypart
);
2858 /* Show the output dies here. This is necessary for SUBREGs
2859 of pseudos since we cannot track their lifetimes correctly;
2860 hard regs shouldn't appear here except as return values.
2861 We never want to emit such a clobber after reload. */
2863 && ! (reload_in_progress
|| reload_completed
)
2864 && need_clobber
!= 0)
2865 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
	continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
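/* Worked example (illustrative): moving the DFmode constant 1.5 into a
   register can be emitted as an SFmode load extended to DFmode, because
   1.5 is exactly representable in SFmode and exact_real_truncate
   succeeds; a constant such as 0.1 is not exact in any narrower binary
   mode, so the loop above falls through and the full DFmode constant is
   used.  This also assumes the target advertises the extension
   (can_extend_p) or can extend directly from memory
   (float_extend_from_mem).  */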
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
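/* Worked example (illustrative, downward-growing stack): for
   push_block (GEN_INT (32), 8, 1) the stack pointer is lowered by 40
   bytes and the returned address is virtual_outgoing_args_rtx plus 8, so
   the 8 bytes of padding sit below (at lower addresses than) the 32-byte
   block.  With BELOW == 0 the same call returns
   virtual_outgoing_args_rtx itself and the padding ends up above the
   block.  */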
2986 #ifdef PUSH_ROUNDING
2988 /* Emit single push insn. */
2991 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
2994 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
2996 enum insn_code icode
;
2997 insn_operand_predicate_fn pred
;
2999 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3000 /* If there is push pattern, use it. Otherwise try old way of throwing
3001 MEM representing push operation to move expander. */
3002 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3003 if (icode
!= CODE_FOR_nothing
)
3005 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3006 && !((*pred
) (x
, mode
))))
3007 x
= force_reg (mode
, x
);
3008 emit_insn (GEN_FCN (icode
) (x
));
3011 if (GET_MODE_SIZE (mode
) == rounded_size
)
3012 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3013 /* If we are to pad downward, adjust the stack pointer first and
3014 then store X into the stack location using an offset. This is
3015 because emit_move_insn does not know how to pad; it does not have
3017 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
3019 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
3020 HOST_WIDE_INT offset
;
3022 emit_move_insn (stack_pointer_rtx
,
3023 expand_binop (Pmode
,
3024 #ifdef STACK_GROWS_DOWNWARD
3030 GEN_INT (rounded_size
),
3031 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
3033 offset
= (HOST_WIDE_INT
) padding_size
;
3034 #ifdef STACK_GROWS_DOWNWARD
3035 if (STACK_PUSH_CODE
== POST_DEC
)
3036 /* We have already decremented the stack pointer, so get the
3038 offset
+= (HOST_WIDE_INT
) rounded_size
;
3040 if (STACK_PUSH_CODE
== POST_INC
)
3041 /* We have already incremented the stack pointer, so get the
3043 offset
-= (HOST_WIDE_INT
) rounded_size
;
3045 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (offset
));
3049 #ifdef STACK_GROWS_DOWNWARD
3050 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3051 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3052 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3054 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3055 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3056 GEN_INT (rounded_size
));
3058 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3061 dest
= gen_rtx_MEM (mode
, dest_addr
);
3065 set_mem_attributes (dest
, type
, 1);
3067 if (flag_optimize_sibling_calls
)
3068 /* Function incoming arguments may overlap with sibling call
3069 outgoing arguments and we cannot allow reordering of reads
3070 from function arguments with stores to outgoing arguments
3071 of sibling calls. */
3072 set_mem_alias_set (dest
, 0);
3074 emit_move_insn (dest
, x
);
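/* Worked example (illustrative): pushing an HImode value when
   PUSH_ROUNDING rounds 2 bytes up to 4 and FUNCTION_ARG_PADDING says
   downward.  padding_size is 2, the stack pointer is first adjusted by
   the full rounded_size, and the value is then stored at the offset
   computed above (2, further corrected by rounded_size when
   STACK_PUSH_CODE is POST_DEC or POST_INC), so on a downward-growing
   stack the datum occupies the upper two bytes of the 4-byte slot and
   the padding the lower two.  */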
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.
3085 ALIGN (in bits) is maximum alignment we can assume.
3087 If PARTIAL and REG are both nonzero, then copy that many of the first
3088 words of X into registers starting with REG, and push the rest of X.
3089 The amount of space pushed is decreased by PARTIAL words,
3090 rounded *down* to a multiple of PARM_BOUNDARY.
3091 REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.
3096 EXTRA is the amount in bytes of extra space to leave next to this arg.
3097 This is ignored if an argument block has already been allocated.
3099 On a machine that lacks real push insns, ARGS_ADDR is the address of
3100 the bottom of the argument block for this call. We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.
3104 ARGS_SO_FAR is the size of args previously pushed for this call.
3106 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3107 for arguments passed in registers. If nonzero, it will be the number
3108 of bytes required. */
3111 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3112 unsigned int align
, int partial
, rtx reg
, int extra
,
3113 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3117 enum direction stack_direction
3118 #ifdef STACK_GROWS_DOWNWARD
3124 /* Decide where to pad the argument: `downward' for below,
3125 `upward' for above, or `none' for don't pad it.
3126 Default is below for small data on big-endian machines; else above. */
3127 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3129 /* Invert direction if stack is post-decrement.
3131 if (STACK_PUSH_CODE
== POST_DEC
)
3132 if (where_pad
!= none
)
3133 where_pad
= (where_pad
== downward
? upward
: downward
);
3137 if (mode
== BLKmode
)
3139 /* Copy a block into the stack, entirely or partially. */
3142 int used
= partial
* UNITS_PER_WORD
;
3146 if (reg
&& GET_CODE (reg
) == PARALLEL
)
3148 /* Use the size of the elt to compute offset. */
3149 rtx elt
= XEXP (XVECEXP (reg
, 0, 0), 0);
3150 used
= partial
* GET_MODE_SIZE (GET_MODE (elt
));
3151 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3154 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3161 /* USED is now the # of bytes we need not copy to the stack
3162 because registers will take care of them. */
3165 xinner
= adjust_address (xinner
, BLKmode
, used
);
3167 /* If the partial register-part of the arg counts in its stack size,
3168 skip the part of stack space corresponding to the registers.
3169 Otherwise, start copying to the beginning of the stack space,
3170 by setting SKIP to 0. */
3171 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3173 #ifdef PUSH_ROUNDING
3174 /* Do it with several push insns if that doesn't take lots of insns
3175 and if there is no difficulty with push insns that skip bytes
3176 on the stack for alignment purposes. */
3179 && GET_CODE (size
) == CONST_INT
3181 && MEM_ALIGN (xinner
) >= align
3182 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3183 /* Here we avoid the case of a structure whose weak alignment
3184 forces many pushes of a small amount of data,
3185 and such small pushes do rounding that causes trouble. */
3186 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3187 || align
>= BIGGEST_ALIGNMENT
3188 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3189 == (align
/ BITS_PER_UNIT
)))
3190 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3192 /* Push padding now if padding above and stack grows down,
3193 or if padding below and stack grows up.
3194 But if space already allocated, this has already been done. */
3195 if (extra
&& args_addr
== 0
3196 && where_pad
!= none
&& where_pad
!= stack_direction
)
3197 anti_adjust_stack (GEN_INT (extra
));
3199 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3202 #endif /* PUSH_ROUNDING */
3206 /* Otherwise make space on the stack and copy the data
3207 to the address of that space. */
3209 /* Deduct words put into registers from the size we must copy. */
3212 if (GET_CODE (size
) == CONST_INT
)
3213 size
= GEN_INT (INTVAL (size
) - used
);
3215 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3216 GEN_INT (used
), NULL_RTX
, 0,
3220 /* Get the address of the stack space.
3221 In this case, we do not deal with EXTRA separately.
3222 A single stack adjust will do. */
3225 temp
= push_block (size
, extra
, where_pad
== downward
);
3228 else if (GET_CODE (args_so_far
) == CONST_INT
)
3229 temp
= memory_address (BLKmode
,
3230 plus_constant (args_addr
,
3231 skip
+ INTVAL (args_so_far
)));
3233 temp
= memory_address (BLKmode
,
3234 plus_constant (gen_rtx_PLUS (Pmode
,
3239 if (!ACCUMULATE_OUTGOING_ARGS
)
3241 /* If the source is referenced relative to the stack pointer,
3242 copy it to another register to stabilize it. We do not need
3243 to do this if we know that we won't be changing sp. */
3245 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3246 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3247 temp
= copy_to_reg (temp
);
3250 target
= gen_rtx_MEM (BLKmode
, temp
);
3252 /* We do *not* set_mem_attributes here, because incoming arguments
3253 may overlap with sibling call outgoing arguments and we cannot
3254 allow reordering of reads from function arguments with stores
3255 to outgoing arguments of sibling calls. We do, however, want
3256 to record the alignment of the stack slot. */
3257 /* ALIGN may well be better aligned than TYPE, e.g. due to
3258 PARM_BOUNDARY. Assume the caller isn't lying. */
3259 set_mem_align (target
, align
);
3261 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3264 else if (partial
> 0)
3266 /* Scalar partly in registers. */
3268 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3271 /* # words of start of argument
3272 that we must make space for but need not store. */
3273 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3274 int args_offset
= INTVAL (args_so_far
);
3277 /* Push padding now if padding above and stack grows down,
3278 or if padding below and stack grows up.
3279 But if space already allocated, this has already been done. */
3280 if (extra
&& args_addr
== 0
3281 && where_pad
!= none
&& where_pad
!= stack_direction
)
3282 anti_adjust_stack (GEN_INT (extra
));
3284 /* If we make space by pushing it, we might as well push
3285 the real data. Otherwise, we can leave OFFSET nonzero
3286 and leave the space uninitialized. */
3290 /* Now NOT_STACK gets the number of words that we don't need to
3291 allocate on the stack. */
3292 not_stack
= partial
- offset
;
3294 /* If the partial register-part of the arg counts in its stack size,
3295 skip the part of stack space corresponding to the registers.
3296 Otherwise, start copying to the beginning of the stack space,
3297 by setting SKIP to 0. */
3298 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3300 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3301 x
= validize_mem (force_const_mem (mode
, x
));
3303 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3304 SUBREGs of such registers are not allowed. */
3305 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
3306 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3307 x
= copy_to_reg (x
);
3309 /* Loop over all the words allocated on the stack for this arg. */
3310 /* We can do it by words, because any scalar bigger than a word
3311 has a size a multiple of a word. */
3312 #ifndef PUSH_ARGS_REVERSED
3313 for (i
= not_stack
; i
< size
; i
++)
3315 for (i
= size
- 1; i
>= not_stack
; i
--)
3317 if (i
>= not_stack
+ offset
)
3318 emit_push_insn (operand_subword_force (x
, i
, mode
),
3319 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3321 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3323 reg_parm_stack_space
, alignment_pad
);
3330 /* Push padding now if padding above and stack grows down,
3331 or if padding below and stack grows up.
3332 But if space already allocated, this has already been done. */
3333 if (extra
&& args_addr
== 0
3334 && where_pad
!= none
&& where_pad
!= stack_direction
)
3335 anti_adjust_stack (GEN_INT (extra
));
3337 #ifdef PUSH_ROUNDING
3338 if (args_addr
== 0 && PUSH_ARGS
)
3339 emit_single_push_insn (mode
, x
, type
);
3343 if (GET_CODE (args_so_far
) == CONST_INT
)
3345 = memory_address (mode
,
3346 plus_constant (args_addr
,
3347 INTVAL (args_so_far
)));
3349 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3351 dest
= gen_rtx_MEM (mode
, addr
);
3353 /* We do *not* set_mem_attributes here, because incoming arguments
3354 may overlap with sibling call outgoing arguments and we cannot
3355 allow reordering of reads from function arguments with stores
3356 to outgoing arguments of sibling calls. We do, however, want
3357 to record the alignment of the stack slot. */
3358 /* ALIGN may well be better aligned than TYPE, e.g. due to
3359 PARM_BOUNDARY. Assume the caller isn't lying. */
3360 set_mem_align (dest
, align
);
3362 emit_move_insn (dest
, x
);
3366 /* If part should go in registers, copy that part
3367 into the appropriate registers. Do this now, at the end,
3368 since mem-to-mem copies above may do function calls. */
3369 if (partial
> 0 && reg
!= 0)
3371 /* Handle calls that pass values in multiple non-contiguous locations.
3372 The Irix 6 ABI has examples of this. */
3373 if (GET_CODE (reg
) == PARALLEL
)
3374 emit_group_load (reg
, x
, type
, -1);
3376 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3379 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3380 anti_adjust_stack (GEN_INT (extra
));
3382 if (alignment_pad
&& args_addr
== 0)
3383 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || !REG_P (x)
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}
3405 /* Expand an assignment that stores the value of FROM into TO.
3406 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3407 (If the value is constant, this rtx is a constant.)
3408 Otherwise, the returned value is NULL_RTX. */
3411 expand_assignment (tree to
, tree from
, int want_value
)
3416 /* Don't crash if the lhs of the assignment was erroneous. */
3418 if (TREE_CODE (to
) == ERROR_MARK
)
3420 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3421 return want_value
? result
: NULL_RTX
;
3424 /* Assignment of a structure component needs special treatment
3425 if the structure component's rtx is not simply a MEM.
3426 Assignment of an array element at a constant index, and assignment of
3427 an array element in an unaligned packed structure field, has the same
3430 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3431 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
3432 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
3434 enum machine_mode mode1
;
3435 HOST_WIDE_INT bitsize
, bitpos
;
3443 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3444 &unsignedp
, &volatilep
);
3446 /* If we are going to use store_bit_field and extract_bit_field,
3447 make sure to_rtx will be safe for multiple use. */
3449 if (mode1
== VOIDmode
&& want_value
)
3450 tem
= stabilize_reference (tem
);
3452 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3456 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3458 if (!MEM_P (to_rtx
))
3461 #ifdef POINTERS_EXTEND_UNSIGNED
3462 if (GET_MODE (offset_rtx
) != Pmode
)
3463 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
3465 if (GET_MODE (offset_rtx
) != ptr_mode
)
3466 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3469 /* A constant address in TO_RTX can have VOIDmode, we must not try
3470 to call force_reg for that case. Avoid that case. */
3472 && GET_MODE (to_rtx
) == BLKmode
3473 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3475 && (bitpos
% bitsize
) == 0
3476 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3477 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3479 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3483 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3484 highest_pow2_factor_for_target (to
,
3490 /* If the field is at offset zero, we could have been given the
3491 DECL_RTX of the parent struct. Don't munge it. */
3492 to_rtx
= shallow_copy_rtx (to_rtx
);
3494 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
3497 /* Deal with volatile and readonly fields. The former is only done
3498 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3499 if (volatilep
&& MEM_P (to_rtx
))
3501 if (to_rtx
== orig_to_rtx
)
3502 to_rtx
= copy_rtx (to_rtx
);
3503 MEM_VOLATILE_P (to_rtx
) = 1;
3506 if (TREE_CODE (to
) == COMPONENT_REF
3507 && TREE_READONLY (TREE_OPERAND (to
, 1))
3508 /* We can't assert that a MEM won't be set more than once
3509 if the component is not addressable because another
3510 non-addressable component may be referenced by the same MEM. */
3511 && ! (MEM_P (to_rtx
) && ! can_address_p (to
)))
3513 if (to_rtx
== orig_to_rtx
)
3514 to_rtx
= copy_rtx (to_rtx
);
3515 RTX_UNCHANGING_P (to_rtx
) = 1;
3518 if (MEM_P (to_rtx
) && ! can_address_p (to
))
3520 if (to_rtx
== orig_to_rtx
)
3521 to_rtx
= copy_rtx (to_rtx
);
3522 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3525 /* Optimize bitfld op= val in certain cases. */
3526 while (mode1
== VOIDmode
&& !want_value
3527 && bitsize
> 0 && bitsize
< BITS_PER_WORD
3528 && GET_MODE_BITSIZE (GET_MODE (to_rtx
)) <= BITS_PER_WORD
3529 && !TREE_SIDE_EFFECTS (to
)
3530 && !TREE_THIS_VOLATILE (to
))
3533 rtx value
, str_rtx
= to_rtx
;
3534 HOST_WIDE_INT bitpos1
= bitpos
;
3539 if (TREE_CODE (TREE_TYPE (src
)) != INTEGER_TYPE
3540 || TREE_CODE_CLASS (TREE_CODE (src
)) != '2')
3543 op0
= TREE_OPERAND (src
, 0);
3544 op1
= TREE_OPERAND (src
, 1);
3547 if (! operand_equal_p (to
, op0
, 0))
3550 if (MEM_P (str_rtx
))
3552 enum machine_mode mode
= GET_MODE (str_rtx
);
3553 HOST_WIDE_INT offset1
;
3555 if (GET_MODE_BITSIZE (mode
) == 0
3556 || GET_MODE_BITSIZE (mode
) > BITS_PER_WORD
)
3558 mode
= get_best_mode (bitsize
, bitpos1
, MEM_ALIGN (str_rtx
),
3560 if (mode
== VOIDmode
)
3564 bitpos1
%= GET_MODE_BITSIZE (mode
);
3565 offset1
= (offset1
- bitpos1
) / BITS_PER_UNIT
;
3566 str_rtx
= adjust_address (str_rtx
, mode
, offset1
);
3568 else if (!REG_P (str_rtx
) && GET_CODE (str_rtx
) != SUBREG
)
3571 /* If the bit field covers the whole REG/MEM, store_field
3572 will likely generate better code. */
3573 if (bitsize
>= GET_MODE_BITSIZE (GET_MODE (str_rtx
)))
	  /* We can't handle fields split across multiple entities.  */
3577 if (bitpos1
+ bitsize
> GET_MODE_BITSIZE (GET_MODE (str_rtx
)))
3580 if (BYTES_BIG_ENDIAN
)
3581 bitpos1
= GET_MODE_BITSIZE (GET_MODE (str_rtx
)) - bitpos1
3584 /* Special case some bitfield op= exp. */
3585 switch (TREE_CODE (src
))
3589 /* For now, just optimize the case of the topmost bitfield
3590 where we don't need to do any masking and also
3591 1 bit bitfields where xor can be used.
3592 We might win by one instruction for the other bitfields
3593 too if insv/extv instructions aren't used, so that
3594 can be added later. */
3595 if (bitpos1
+ bitsize
!= GET_MODE_BITSIZE (GET_MODE (str_rtx
))
3596 && (bitsize
!= 1 || TREE_CODE (op1
) != INTEGER_CST
))
3598 value
= expand_expr (op1
, NULL_RTX
, GET_MODE (str_rtx
), 0);
3599 value
= convert_modes (GET_MODE (str_rtx
),
3600 TYPE_MODE (TREE_TYPE (op1
)), value
,
3601 TYPE_UNSIGNED (TREE_TYPE (op1
)));
3603 /* We may be accessing data outside the field, which means
3604 we can alias adjacent data. */
3605 if (MEM_P (str_rtx
))
3607 str_rtx
= shallow_copy_rtx (str_rtx
);
3608 set_mem_alias_set (str_rtx
, 0);
3609 set_mem_expr (str_rtx
, 0);
3612 binop
= TREE_CODE (src
) == PLUS_EXPR
? add_optab
: sub_optab
;
3614 && bitpos1
+ bitsize
!= GET_MODE_BITSIZE (GET_MODE (str_rtx
)))
3616 value
= expand_and (GET_MODE (str_rtx
), value
, const1_rtx
,
3620 value
= expand_shift (LSHIFT_EXPR
, GET_MODE (str_rtx
),
3621 value
, build_int_2 (bitpos1
, 0),
3623 result
= expand_binop (GET_MODE (str_rtx
), binop
, str_rtx
,
3624 value
, str_rtx
, 1, OPTAB_WIDEN
);
3625 if (result
!= str_rtx
)
3626 emit_move_insn (str_rtx
, result
);
3638 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3640 /* Spurious cast for HPUX compiler. */
3641 ? ((enum machine_mode
)
3642 TYPE_MODE (TREE_TYPE (to
)))
3644 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
3646 preserve_temp_slots (result
);
3650 /* If the value is meaningful, convert RESULT to the proper mode.
3651 Otherwise, return nothing. */
3652 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3653 TYPE_MODE (TREE_TYPE (from
)),
3655 TYPE_UNSIGNED (TREE_TYPE (to
)))
3659 /* If the rhs is a function call and its value is not an aggregate,
3660 call the function before we start to compute the lhs.
3661 This is needed for correct code for cases such as
3662 val = setjmp (buf) on machines where reference to val
3663 requires loading up part of an address in a separate insn.
3665 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3666 since it might be a promoted variable where the zero- or sign- extension
3667 needs to be done. Handling this in the normal way is safe because no
3668 computation is done before the call. */
3669 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
3670 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3671 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3672 && REG_P (DECL_RTL (to
))))
3677 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3679 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3681 /* Handle calls that return values in multiple non-contiguous locations.
3682 The Irix 6 ABI has examples of this. */
3683 if (GET_CODE (to_rtx
) == PARALLEL
)
3684 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
3685 int_size_in_bytes (TREE_TYPE (from
)));
3686 else if (GET_MODE (to_rtx
) == BLKmode
)
3687 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
3690 if (POINTER_TYPE_P (TREE_TYPE (to
)))
3691 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3692 emit_move_insn (to_rtx
, value
);
3694 preserve_temp_slots (to_rtx
);
3697 return want_value
? to_rtx
: NULL_RTX
;
3700 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3701 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3704 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3706 /* Don't move directly into a return register. */
3707 if (TREE_CODE (to
) == RESULT_DECL
3708 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
3713 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3715 if (GET_CODE (to_rtx
) == PARALLEL
)
3716 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
3717 int_size_in_bytes (TREE_TYPE (from
)));
3719 emit_move_insn (to_rtx
, temp
);
3721 preserve_temp_slots (to_rtx
);
3724 return want_value
? to_rtx
: NULL_RTX
;
3727 /* In case we are returning the contents of an object which overlaps
3728 the place the value is being stored, use a safe function when copying
3729 a value through a pointer into a structure value return block. */
3730 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3731 && current_function_returns_struct
3732 && !current_function_returns_pcc_struct
)
3737 size
= expr_size (from
);
3738 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3740 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3741 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3742 XEXP (from_rtx
, 0), Pmode
,
3743 convert_to_mode (TYPE_MODE (sizetype
),
3744 size
, TYPE_UNSIGNED (sizetype
)),
3745 TYPE_MODE (sizetype
));
3747 preserve_temp_slots (to_rtx
);
3750 return want_value
? to_rtx
: NULL_RTX
;
3753 /* Compute FROM and store the value in the rtx we got. */
3756 result
= store_expr (from
, to_rtx
, want_value
);
3757 preserve_temp_slots (result
);
3760 return want_value
? result
: NULL_RTX
;
3763 /* Generate code for computing expression EXP,
3764 and storing the value into TARGET.
3766 If WANT_VALUE & 1 is nonzero, return a copy of the value
3767 not in TARGET, so that we can be sure to use the proper
3768 value in a containing expression even if TARGET has something
3769 else stored in it. If possible, we copy the value through a pseudo
3770 and return that pseudo. Or, if the value is constant, we try to
3771 return the constant. In some cases, we return a pseudo
3772 copied *from* TARGET.
3774 If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
3777 assignments into the same BLKmode object with different values
3778 with no sequence point. Will other languages need this to
3781 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3782 to catch quickly any cases where the caller uses the value
3783 and fails to set WANT_VALUE.
3785 If WANT_VALUE & 2 is set, this is a store into a call param on the
3786 stack, and block moves may need to be treated specially. */
3789 store_expr (tree exp
, rtx target
, int want_value
)
3792 rtx alt_rtl
= NULL_RTX
;
3793 int dont_return_target
= 0;
3794 int dont_store_target
= 0;
3796 if (VOID_TYPE_P (TREE_TYPE (exp
)))
3798 /* C++ can generate ?: expressions with a throw expression in one
3799 branch and an rvalue in the other. Here, we resolve attempts to
3800 store the throw expression's nonexistent result. */
3803 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
3806 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
3808 /* Perform first part of compound expression, then assign from second
3810 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
3811 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
3812 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3814 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3816 /* For conditional expression, get safe form of the target. Then
3817 test the condition, doing the appropriate assignment on either
3818 side. This avoids the creation of unnecessary temporaries.
3819 For non-BLKmode, it is more efficient not to do this. */
3821 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3823 do_pending_stack_adjust ();
3825 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3826 store_expr (TREE_OPERAND (exp
, 1), target
, want_value
& 2);
3827 emit_jump_insn (gen_jump (lab2
));
3830 store_expr (TREE_OPERAND (exp
, 2), target
, want_value
& 2);
3834 return want_value
& 1 ? target
: NULL_RTX
;
3836 else if ((want_value
& 1) != 0
3838 && ! MEM_VOLATILE_P (target
)
3839 && GET_MODE (target
) != BLKmode
)
3840 /* If target is in memory and caller wants value in a register instead,
3841 arrange that. Pass TARGET as target for expand_expr so that,
3842 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3843 We know expand_expr will not use the target in that case.
3844 Don't do this if TARGET is volatile because we are supposed
3845 to write it and then read it. */
3847 temp
= expand_expr (exp
, target
, GET_MODE (target
),
3848 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
3849 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3851 /* If TEMP is already in the desired TARGET, only copy it from
3852 memory and don't store it there again. */
3854 || (rtx_equal_p (temp
, target
)
3855 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
3856 dont_store_target
= 1;
3857 temp
= copy_to_reg (temp
);
3859 dont_return_target
= 1;
3861 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3862 /* If this is a scalar in a register that is stored in a wider mode
3863 than the declared mode, compute the result into its declared mode
3864 and then convert to the wider mode. Our value is the computed
3867 rtx inner_target
= 0;
3869 /* If we don't want a value, we can do the conversion inside EXP,
3870 which will often result in some optimizations. Do the conversion
3871 in two steps: first change the signedness, if needed, then
3872 the extend. But don't do this if the type of EXP is a subtype
3873 of something else since then the conversion might involve
3874 more than just converting modes. */
3875 if ((want_value
& 1) == 0
3876 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
3877 && TREE_TYPE (TREE_TYPE (exp
)) == 0
3878 && (!lang_hooks
.reduce_bit_field_operations
3879 || (GET_MODE_PRECISION (GET_MODE (target
))
3880 == TYPE_PRECISION (TREE_TYPE (exp
)))))
3882 if (TYPE_UNSIGNED (TREE_TYPE (exp
))
3883 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3885 (lang_hooks
.types
.signed_or_unsigned_type
3886 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
3888 exp
= convert (lang_hooks
.types
.type_for_mode
3889 (GET_MODE (SUBREG_REG (target
)),
3890 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3893 inner_target
= SUBREG_REG (target
);
3896 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
3897 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
3899 /* If TEMP is a MEM and we want a result value, make the access
3900 now so it gets done only once. Strictly speaking, this is
3901 only necessary if the MEM is volatile, or if the address
3902 overlaps TARGET. But not performing the load twice also
3903 reduces the amount of rtl we generate and then have to CSE. */
3904 if (MEM_P (temp
) && (want_value
& 1) != 0)
3905 temp
= copy_to_reg (temp
);
3907 /* If TEMP is a VOIDmode constant, use convert_modes to make
3908 sure that we properly convert it. */
3909 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3911 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3912 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
3913 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3914 GET_MODE (target
), temp
,
3915 SUBREG_PROMOTED_UNSIGNED_P (target
));
3918 convert_move (SUBREG_REG (target
), temp
,
3919 SUBREG_PROMOTED_UNSIGNED_P (target
));
3921 /* If we promoted a constant, change the mode back down to match
3922 target. Otherwise, the caller might get confused by a result whose
3923 mode is larger than expected. */
3925 if ((want_value
& 1) != 0 && GET_MODE (temp
) != GET_MODE (target
))
3927 if (GET_MODE (temp
) != VOIDmode
)
3929 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
3930 SUBREG_PROMOTED_VAR_P (temp
) = 1;
3931 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
3932 SUBREG_PROMOTED_UNSIGNED_P (target
));
3935 temp
= convert_modes (GET_MODE (target
),
3936 GET_MODE (SUBREG_REG (target
)),
3937 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
3940 return want_value
& 1 ? temp
: NULL_RTX
;
3944 temp
= expand_expr_real (exp
, target
, GET_MODE (target
),
3946 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
3948 /* Return TARGET if it's a specified hardware register.
3949 If TARGET is a volatile mem ref, either return TARGET
3950 or return a reg copied *from* TARGET; ANSI requires this.
3952 Otherwise, if TEMP is not TARGET, return TEMP
3953 if it is constant (for efficiency),
3954 or if we really want the correct value. */
3955 if (!(target
&& REG_P (target
)
3956 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3957 && !(MEM_P (target
) && MEM_VOLATILE_P (target
))
3958 && ! rtx_equal_p (temp
, target
)
3959 && (CONSTANT_P (temp
) || (want_value
& 1) != 0))
3960 dont_return_target
= 1;
3963 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3964 the same as that of TARGET, adjust the constant. This is needed, for
3965 example, in case it is a CONST_DOUBLE and we want only a word-sized
3967 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3968 && TREE_CODE (exp
) != ERROR_MARK
3969 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3970 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3971 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
3973 /* If value was not generated in the target, store it there.
3974 Convert the value to TARGET's type first if necessary and emit the
3975 pending incrementations that have been queued when expanding EXP.
3976 Note that we cannot emit the whole queue blindly because this will
3977 effectively disable the POST_INC optimization later.
3979 If TEMP and TARGET compare equal according to rtx_equal_p, but
3980 one or both of them are volatile memory refs, we have to distinguish
3982 - expand_expr has used TARGET. In this case, we must not generate
3983 another copy. This can be detected by TARGET being equal according
3985 - expand_expr has not used TARGET - that means that the source just
3986 happens to have the same RTX form. Since temp will have been created
3987 by expand_expr, it will compare unequal according to == .
3988 We must generate a copy in this case, to reach the correct number
3989 of volatile memory references. */
3991 if ((! rtx_equal_p (temp
, target
)
3992 || (temp
!= target
&& (side_effects_p (temp
)
3993 || side_effects_p (target
))))
3994 && TREE_CODE (exp
) != ERROR_MARK
3995 && ! dont_store_target
3996 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3997 but TARGET is not valid memory reference, TEMP will differ
3998 from TARGET although it is really the same location. */
3999 && !(alt_rtl
&& rtx_equal_p (alt_rtl
, target
))
4000 /* If there's nothing to copy, don't bother. Don't call expr_size
4001 unless necessary, because some front ends' (C++) expr_size hook
4002 aborts on objects that are not supposed to be bit-copied or bit-initialized. */
4004 && expr_size (exp
) != const0_rtx
)
4006 if (GET_MODE (temp
) != GET_MODE (target
)
4007 && GET_MODE (temp
) != VOIDmode
)
4009 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
4010 if (dont_return_target
)
4012 /* In this case, we will return TEMP,
4013 so make sure it has the proper mode.
4014 But don't forget to store the value into TARGET. */
4015 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4016 emit_move_insn (target
, temp
);
4019 convert_move (target
, temp
, unsignedp
);
4022 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4024 /* Handle copying a string constant into an array. The string
4025 constant may be shorter than the array. So copy just the string's
4026 actual length, and clear the rest. First get the size of the data
4027 type of the string, which is actually the size of the target. */
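/* Illustrative sketch, added for clarity and not part of the original
   sources: for "char buf[16] = "abc";" the string constant occupies
   4 bytes (including the terminating NUL) while the target occupies 16,
   so the code below copies MIN (16, 4) == 4 bytes and then clears the
   remaining 12 bytes of BUF.  */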
4028 rtx size
= expr_size (exp
);
4030 if (GET_CODE (size
) == CONST_INT
4031 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4032 emit_block_move (target
, temp
, size
,
4034 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4037 /* Compute the size of the data to copy from the string. */
4039 = size_binop (MIN_EXPR
,
4040 make_tree (sizetype
, size
),
4041 size_int (TREE_STRING_LENGTH (exp
)));
4043 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4045 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4048 /* Copy that much. */
4049 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
,
4050 TYPE_UNSIGNED (sizetype
));
4051 emit_block_move (target
, temp
, copy_size_rtx
,
4053 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4055 /* Figure out how much is left in TARGET that we have to clear.
4056 Do all calculations in ptr_mode. */
4057 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4059 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4060 target
= adjust_address (target
, BLKmode
,
4061 INTVAL (copy_size_rtx
));
4065 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4066 copy_size_rtx
, NULL_RTX
, 0,
4069 #ifdef POINTERS_EXTEND_UNSIGNED
4070 if (GET_MODE (copy_size_rtx
) != Pmode
)
4071 copy_size_rtx
= convert_to_mode (Pmode
, copy_size_rtx
,
4072 TYPE_UNSIGNED (sizetype
));
4075 target
= offset_address (target
, copy_size_rtx
,
4076 highest_pow2_factor (copy_size
));
4077 label
= gen_label_rtx ();
4078 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4079 GET_MODE (size
), 0, label
);
4082 if (size
!= const0_rtx
)
4083 clear_storage (target
, size
);
4089 /* Handle calls that return values in multiple non-contiguous locations.
4090 The Irix 6 ABI has examples of this. */
4091 else if (GET_CODE (target
) == PARALLEL
)
4092 emit_group_load (target
, temp
, TREE_TYPE (exp
),
4093 int_size_in_bytes (TREE_TYPE (exp
)));
4094 else if (GET_MODE (temp
) == BLKmode
)
4095 emit_block_move (target
, temp
, expr_size (exp
),
4097 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4100 temp
= force_operand (temp
, target
);
4102 emit_move_insn (target
, temp
);
4106 /* If we don't want a value, return NULL_RTX. */
4107 if ((want_value
& 1) == 0)
4110 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4111 ??? The latter test doesn't seem to make sense. */
4112 else if (dont_return_target
&& !MEM_P (temp
))
4115 /* Return TARGET itself if it is a hard register. */
4116 else if ((want_value
& 1) != 0
4117 && GET_MODE (target
) != BLKmode
4118 && ! (REG_P (target
)
4119 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4120 return copy_to_reg (target
);
4126 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4127 values and place that count in *P_NZ_ELTS. Discover how many scalar fields
4128 are set to non-constant values and place that count in *P_NC_ELTS. */
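/* Worked example (illustrative only, not from the original comment):
   for the C initializer { 0, 3, 0, f () } this routine would be expected
   to report *P_NZ_ELTS == 2 (the constant 3 plus the non-zero call) and
   *P_NC_ELTS == 1 (the call, which is not a valid constant initializer).  */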
4131 categorize_ctor_elements_1 (tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
4132 HOST_WIDE_INT
*p_nc_elts
)
4134 HOST_WIDE_INT nz_elts
, nc_elts
;
4140 for (list
= CONSTRUCTOR_ELTS (ctor
); list
; list
= TREE_CHAIN (list
))
4142 tree value
= TREE_VALUE (list
);
4143 tree purpose
= TREE_PURPOSE (list
);
4147 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4149 tree lo_index
= TREE_OPERAND (purpose
, 0);
4150 tree hi_index
= TREE_OPERAND (purpose
, 1);
4152 if (host_integerp (lo_index
, 1) && host_integerp (hi_index
, 1))
4153 mult
= (tree_low_cst (hi_index
, 1)
4154 - tree_low_cst (lo_index
, 1) + 1);
4157 switch (TREE_CODE (value
))
4161 HOST_WIDE_INT nz
= 0, nc
= 0;
4162 categorize_ctor_elements_1 (value
, &nz
, &nc
);
4163 nz_elts
+= mult
* nz
;
4164 nc_elts
+= mult
* nc
;
4170 if (!initializer_zerop (value
))
4174 if (!initializer_zerop (TREE_REALPART (value
)))
4176 if (!initializer_zerop (TREE_IMAGPART (value
)))
4182 for (v
= TREE_VECTOR_CST_ELTS (value
); v
; v
= TREE_CHAIN (v
))
4183 if (!initializer_zerop (TREE_VALUE (v
)))
4190 if (!initializer_constant_valid_p (value
, TREE_TYPE (value
)))
4196 *p_nz_elts
+= nz_elts
;
4197 *p_nc_elts
+= nc_elts
;
4201 categorize_ctor_elements (tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
4202 HOST_WIDE_INT
*p_nc_elts
)
4206 categorize_ctor_elements_1 (ctor
, p_nz_elts
, p_nc_elts
);
4209 /* Count the number of scalars in TYPE. Return -1 on overflow or variable-size. */
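/* Worked example (illustrative, not part of the original sources): for
   "struct { int x; double y; } a[10]" each record element contributes two
   scalars, so the ARRAY_TYPE case below would return 10 * 2 == 20,
   provided the multiplication does not overflow.  */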
4213 count_type_elements (tree type
)
4215 const HOST_WIDE_INT max
= ~((HOST_WIDE_INT
)1 << (HOST_BITS_PER_WIDE_INT
-1));
4216 switch (TREE_CODE (type
))
4220 tree telts
= array_type_nelts (type
);
4221 if (telts
&& host_integerp (telts
, 1))
4223 HOST_WIDE_INT n
= tree_low_cst (telts
, 1) + 1;
4224 HOST_WIDE_INT m
= count_type_elements (TREE_TYPE (type
));
4227 else if (max
/ n
> m
)
4235 HOST_WIDE_INT n
= 0, t
;
4238 for (f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
4239 if (TREE_CODE (f
) == FIELD_DECL
)
4241 t
= count_type_elements (TREE_TYPE (f
));
4251 case QUAL_UNION_TYPE
:
4253 /* Ho hum. How in the world do we guess here? Clearly it isn't
4254 right to count the fields. Guess based on the number of words. */
4255 HOST_WIDE_INT n
= int_size_in_bytes (type
);
4258 return n
/ UNITS_PER_WORD
;
4265 return TYPE_VECTOR_SUBPARTS (type
);
4274 case REFERENCE_TYPE
:
4288 /* Return 1 if EXP contains mostly (3/4) zeros. */
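/* Illustrative example (added; not in the original sources): an 8-element
   constructor with a single nonzero entry gives nz_elts == 1 and
   elts == 8, so 1 < 8 / 4 holds and the initializer counts as mostly
   zeros; with two nonzero entries, 2 < 2 fails and it does not.  */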
4291 mostly_zeros_p (tree exp
)
4293 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4296 HOST_WIDE_INT nz_elts
, nc_elts
, elts
;
4298 /* If there are no ranges of true bits, it is all zero. */
4299 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4300 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4302 categorize_ctor_elements (exp
, &nz_elts
, &nc_elts
);
4303 elts
= count_type_elements (TREE_TYPE (exp
));
4305 return nz_elts
< elts
/ 4;
4308 return initializer_zerop (exp
);
4311 /* Helper function for store_constructor.
4312 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4313 TYPE is the type of the CONSTRUCTOR, not the element type.
4314 CLEARED is as for store_constructor.
4315 ALIAS_SET is the alias set to use for any stores.
4317 This provides a recursive shortcut back to store_constructor when it isn't
4318 necessary to go through store_field. This is so that we can pass through
4319 the cleared field to let store_constructor know that we may not have to
4320 clear a substructure if the outer structure has already been cleared. */
4323 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
4324 HOST_WIDE_INT bitpos
, enum machine_mode mode
,
4325 tree exp
, tree type
, int cleared
, int alias_set
)
4327 if (TREE_CODE (exp
) == CONSTRUCTOR
4328 /* We can only call store_constructor recursively if the size and
4329 bit position are on a byte boundary. */
4330 && bitpos
% BITS_PER_UNIT
== 0
4331 && (bitsize
> 0 && bitsize
% BITS_PER_UNIT
== 0)
4332 /* If we have a nonzero bitpos for a register target, then we just
4333 let store_field do the bitfield handling. This is unlikely to
4334 generate unnecessary clear instructions anyways. */
4335 && (bitpos
== 0 || MEM_P (target
)))
4339 = adjust_address (target
,
4340 GET_MODE (target
) == BLKmode
4342 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4343 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4346 /* Update the alias set, if required. */
4347 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4348 && MEM_ALIAS_SET (target) != 0)
{
4350 target = copy_rtx (target);
4351 set_mem_alias_set (target, alias_set);
}
4354 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
4357 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
4361 /* Store the value of constructor EXP into the rtx TARGET.
4362 TARGET is either a REG or a MEM; we know it cannot conflict, since
4363 safe_from_p has been called.
4364 CLEARED is true if TARGET is known to have been zero'd.
4365 SIZE is the number of bytes of TARGET we are allowed to modify: this
4366 may not be the same as the size of EXP if we are assigning to a field
4367 which has been packed to exclude padding bits. */
4370 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
4372 tree type
= TREE_TYPE (exp
);
4373 #ifdef WORD_REGISTER_OPERATIONS
4374 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4377 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4378 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4382 /* If size is zero or the target is already cleared, do nothing. */
4383 if (size
== 0 || cleared
)
4385 /* We either clear the aggregate or indicate the value is dead. */
4386 else if ((TREE_CODE (type
) == UNION_TYPE
4387 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4388 && ! CONSTRUCTOR_ELTS (exp
))
4389 /* If the constructor is empty, clear the union. */
4391 clear_storage (target
, expr_size (exp
));
4395 /* If we are building a static constructor into a register,
4396 set the initial value as zero so we can fold the value into
4397 a constant. But if more than one register is involved,
4398 this probably loses. */
4399 else if (REG_P (target
) && TREE_STATIC (exp
)
4400 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4402 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4406 /* If the constructor has fewer fields than the structure
4407 or if we are initializing the structure to mostly zeros,
4408 clear the whole structure first. Don't do this if TARGET is a
4409 register whose mode size isn't equal to SIZE since clear_storage
4410 can't handle this case. */
4412 && ((list_length (CONSTRUCTOR_ELTS (exp
)) != fields_length (type
))
4413 || mostly_zeros_p (exp
))
4415 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4418 rtx xtarget
= target
;
4420 if (readonly_fields_p (type
))
4422 xtarget
= copy_rtx (xtarget
);
4423 RTX_UNCHANGING_P (xtarget
) = 1;
4426 clear_storage (xtarget
, GEN_INT (size
));
4431 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4433 /* Store each element of the constructor into
4434 the corresponding field of TARGET. */
4436 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4438 tree field
= TREE_PURPOSE (elt
);
4439 tree value
= TREE_VALUE (elt
);
4440 enum machine_mode mode
;
4441 HOST_WIDE_INT bitsize
;
4442 HOST_WIDE_INT bitpos
= 0;
4444 rtx to_rtx
= target
;
4446 /* Just ignore missing fields.
4447 We cleared the whole structure, above,
4448 if any fields are missing. */
4452 if (cleared
&& initializer_zerop (value
))
4455 if (host_integerp (DECL_SIZE (field
), 1))
4456 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4460 mode
= DECL_MODE (field
);
4461 if (DECL_BIT_FIELD (field
))
4464 offset
= DECL_FIELD_OFFSET (field
);
4465 if (host_integerp (offset
, 0)
4466 && host_integerp (bit_position (field
), 0))
4468 bitpos
= int_bit_position (field
);
4472 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4479 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
4480 make_tree (TREE_TYPE (exp
),
4483 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4484 if (!MEM_P (to_rtx
))
4487 #ifdef POINTERS_EXTEND_UNSIGNED
4488 if (GET_MODE (offset_rtx
) != Pmode
)
4489 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
4491 if (GET_MODE (offset_rtx
) != ptr_mode
)
4492 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4495 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4496 highest_pow2_factor (offset
));
4499 if (TREE_READONLY (field
))
4502 to_rtx
= copy_rtx (to_rtx
);
4504 RTX_UNCHANGING_P (to_rtx
) = 1;
4507 #ifdef WORD_REGISTER_OPERATIONS
4508 /* If this initializes a field that is smaller than a word, at the
4509 start of a word, try to widen it to a full word.
4510 This special case allows us to output C++ member function
4511 initializations in a form that the optimizers can understand. */
4513 && bitsize
< BITS_PER_WORD
4514 && bitpos
% BITS_PER_WORD
== 0
4515 && GET_MODE_CLASS (mode
) == MODE_INT
4516 && TREE_CODE (value
) == INTEGER_CST
4518 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4520 tree type
= TREE_TYPE (value
);
4522 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4524 type
= lang_hooks
.types
.type_for_size
4525 (BITS_PER_WORD
, TYPE_UNSIGNED (type
));
4526 value
= convert (type
, value
);
4529 if (BYTES_BIG_ENDIAN
)
4531 = fold (build2 (LSHIFT_EXPR
, type
, value
,
4532 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4533 bitsize
= BITS_PER_WORD
;
4538 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4539 && DECL_NONADDRESSABLE_P (field
))
4541 to_rtx
= copy_rtx (to_rtx
);
4542 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4545 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4546 value
, type
, cleared
,
4547 get_alias_set (TREE_TYPE (field
)));
4551 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4557 tree elttype
= TREE_TYPE (type
);
4559 HOST_WIDE_INT minelt
= 0;
4560 HOST_WIDE_INT maxelt
= 0;
4562 domain
= TYPE_DOMAIN (type
);
4563 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4564 && TYPE_MAX_VALUE (domain
)
4565 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4566 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4568 /* If we have constant bounds for the range of the type, get them. */
4571 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4572 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4575 /* If the constructor has fewer elements than the array,
4576 clear the whole array first. Similarly if this is
4577 a static constructor of a non-BLKmode object. */
4580 else if (REG_P (target
) && TREE_STATIC (exp
))
4584 HOST_WIDE_INT count
= 0, zero_count
= 0;
4585 need_to_clear
= ! const_bounds_p
;
4587 /* This loop is a more accurate version of the loop in
4588 mostly_zeros_p (it handles RANGE_EXPR in an index).
4589 It is also needed to check for missing elements. */
4590 for (elt
= CONSTRUCTOR_ELTS (exp
);
4591 elt
!= NULL_TREE
&& ! need_to_clear
;
4592 elt
= TREE_CHAIN (elt
))
4594 tree index
= TREE_PURPOSE (elt
);
4595 HOST_WIDE_INT this_node_count
;
4597 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4599 tree lo_index
= TREE_OPERAND (index
, 0);
4600 tree hi_index
= TREE_OPERAND (index
, 1);
4602 if (! host_integerp (lo_index
, 1)
4603 || ! host_integerp (hi_index
, 1))
4609 this_node_count
= (tree_low_cst (hi_index
, 1)
4610 - tree_low_cst (lo_index
, 1) + 1);
4613 this_node_count
= 1;
4615 count
+= this_node_count
;
4616 if (mostly_zeros_p (TREE_VALUE (elt
)))
4617 zero_count
+= this_node_count
;
4620 /* Clear the entire array first if there are any missing elements,
4621 or if the incidence of zero elements is >= 75%. */
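/* Worked example (illustrative only): for "int a[8] = { [3] = 7 };" the
   loop above sees count == 1 explicit element out of
   maxelt - minelt + 1 == 8, so the missing-elements test fires and the
   whole array is cleared before the single store.  A fully written
   initializer with six zero entries out of eight also clears up front,
   since 4 * 6 >= 3 * 8.  */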
4623 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4627 if (need_to_clear
&& size
> 0)
4630 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4632 clear_storage (target
, GEN_INT (size
));
4636 if (!cleared
&& REG_P (target
))
4637 /* Inform later passes that the old value is dead. */
4638 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4640 /* Store each element of the constructor into
4641 the corresponding element of TARGET, determined
4642 by counting the elements. */
4643 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4645 elt
= TREE_CHAIN (elt
), i
++)
4647 enum machine_mode mode
;
4648 HOST_WIDE_INT bitsize
;
4649 HOST_WIDE_INT bitpos
;
4651 tree value
= TREE_VALUE (elt
);
4652 tree index
= TREE_PURPOSE (elt
);
4653 rtx xtarget
= target
;
4655 if (cleared
&& initializer_zerop (value
))
4658 unsignedp
= TYPE_UNSIGNED (elttype
);
4659 mode
= TYPE_MODE (elttype
);
4660 if (mode
== BLKmode
)
4661 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4662 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4665 bitsize
= GET_MODE_BITSIZE (mode
);
4667 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4669 tree lo_index
= TREE_OPERAND (index
, 0);
4670 tree hi_index
= TREE_OPERAND (index
, 1);
4671 rtx index_r
, pos_rtx
;
4672 HOST_WIDE_INT lo
, hi
, count
;
4675 /* If the range is constant and "small", unroll the loop. */
4677 && host_integerp (lo_index
, 0)
4678 && host_integerp (hi_index
, 0)
4679 && (lo
= tree_low_cst (lo_index
, 0),
4680 hi
= tree_low_cst (hi_index
, 0),
4681 count
= hi
- lo
+ 1,
4684 || (host_integerp (TYPE_SIZE (elttype
), 1)
4685 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4688 lo
-= minelt
; hi
-= minelt
;
4689 for (; lo
<= hi
; lo
++)
4691 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4694 && !MEM_KEEP_ALIAS_SET_P (target
)
4695 && TREE_CODE (type
) == ARRAY_TYPE
4696 && TYPE_NONALIASED_COMPONENT (type
))
4698 target
= copy_rtx (target
);
4699 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4702 store_constructor_field
4703 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4704 get_alias_set (elttype
));
4709 rtx loop_start
= gen_label_rtx ();
4710 rtx loop_end
= gen_label_rtx ();
4713 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4714 unsignedp
= TYPE_UNSIGNED (domain
);
4716 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4719 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4721 SET_DECL_RTL (index
, index_r
);
4722 store_expr (lo_index
, index_r
, 0);
4724 /* Build the head of the loop. */
4725 do_pending_stack_adjust ();
4726 emit_label (loop_start
);
4728 /* Assign value to element index. */
4730 = convert (ssizetype
,
4731 fold (build2 (MINUS_EXPR
, TREE_TYPE (index
),
4732 index
, TYPE_MIN_VALUE (domain
))));
4733 position
= size_binop (MULT_EXPR
, position
,
4735 TYPE_SIZE_UNIT (elttype
)));
4737 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4738 xtarget
= offset_address (target
, pos_rtx
,
4739 highest_pow2_factor (position
));
4740 xtarget
= adjust_address (xtarget
, mode
, 0);
4741 if (TREE_CODE (value
) == CONSTRUCTOR
)
4742 store_constructor (value
, xtarget
, cleared
,
4743 bitsize
/ BITS_PER_UNIT
);
4745 store_expr (value
, xtarget
, 0);
4747 /* Generate a conditional jump to exit the loop. */
4748 exit_cond
= build2 (LT_EXPR
, integer_type_node
,
4750 jumpif (exit_cond
, loop_end
);
4752 /* Update the loop counter, and jump to the head of
4754 expand_assignment (index
,
4755 build2 (PLUS_EXPR
, TREE_TYPE (index
),
4756 index
, integer_one_node
), 0);
4758 emit_jump (loop_start
);
4760 /* Build the end of the loop. */
4761 emit_label (loop_end
);
4764 else if ((index
!= 0 && ! host_integerp (index
, 0))
4765 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4770 index
= ssize_int (1);
4773 index
= fold_convert (ssizetype
,
4774 fold (build2 (MINUS_EXPR
,
4777 TYPE_MIN_VALUE (domain
))));
4779 position
= size_binop (MULT_EXPR
, index
,
4781 TYPE_SIZE_UNIT (elttype
)));
4782 xtarget
= offset_address (target
,
4783 expand_expr (position
, 0, VOIDmode
, 0),
4784 highest_pow2_factor (position
));
4785 xtarget
= adjust_address (xtarget
, mode
, 0);
4786 store_expr (value
, xtarget
, 0);
4791 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4792 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4794 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4796 if (MEM_P (target
) && !MEM_KEEP_ALIAS_SET_P (target
)
4797 && TREE_CODE (type
) == ARRAY_TYPE
4798 && TYPE_NONALIASED_COMPONENT (type
))
4800 target
= copy_rtx (target
);
4801 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4803 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4804 type
, cleared
, get_alias_set (elttype
));
4809 else if (TREE_CODE (type
) == VECTOR_TYPE
)
4815 tree elttype
= TREE_TYPE (type
);
4816 int elt_size
= tree_low_cst (TYPE_SIZE (elttype
), 1);
4817 enum machine_mode eltmode
= TYPE_MODE (elttype
);
4818 HOST_WIDE_INT bitsize
;
4819 HOST_WIDE_INT bitpos
;
4823 if (eltmode
== BLKmode
)
4826 n_elts
= TYPE_VECTOR_SUBPARTS (type
);
4827 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
4829 enum machine_mode mode
= GET_MODE (target
);
4831 icode
= (int) vec_init_optab
->handlers
[mode
].insn_code
;
4832 if (icode
!= CODE_FOR_nothing
)
4836 vector
= alloca (n_elts
);
4837 for (i
= 0; i
< n_elts
; i
++)
4838 vector
[i
] = CONST0_RTX (GET_MODE_INNER (mode
));
4842 /* If the constructor has fewer elements than the vector,
4843 clear the whole vector first. Similarly if this is
4844 a static constructor of a non-BLKmode object. */
4847 else if (REG_P (target
) && TREE_STATIC (exp
))
4851 unsigned HOST_WIDE_INT count
= 0, zero_count
= 0;
4853 for (elt
= CONSTRUCTOR_ELTS (exp
);
4855 elt
= TREE_CHAIN (elt
))
4859 int_const_binop (TRUNC_DIV_EXPR
,
4860 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt
))),
4861 TYPE_SIZE (elttype
), 0), 1);
4863 count
+= n_elts_here
;
4864 if (mostly_zeros_p (TREE_VALUE (elt
)))
4865 zero_count
+= n_elts_here
;
4868 /* Clear the entire vector first if there are any missing elements,
4869 or if the incidence of zero elements is >= 75%. */
4870 need_to_clear
= (count
< n_elts
|| 4 * zero_count
>= 3 * count
);
4873 if (need_to_clear
&& size
> 0 && !vector
)
4876 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4878 clear_storage (target
, GEN_INT (size
));
4882 if (!cleared
&& REG_P (target
))
4883 /* Inform later passes that the old value is dead. */
4884 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4886 /* Store each element of the constructor into the corresponding
4887 element of TARGET, determined by counting the elements. */
4888 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4890 elt
= TREE_CHAIN (elt
), i
+= bitsize
/ elt_size
)
4892 tree value
= TREE_VALUE (elt
);
4893 tree index
= TREE_PURPOSE (elt
);
4894 HOST_WIDE_INT eltpos
;
4896 bitsize
= tree_low_cst (TYPE_SIZE (TREE_TYPE (value
)), 1);
4897 if (cleared
&& initializer_zerop (value
))
4901 eltpos
= tree_low_cst (index
, 1);
4907 /* Vector CONSTRUCTORs should only be built from smaller
4908 vectors in the case of BLKmode vectors. */
4909 if (TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
)
4911 vector
[eltpos
] = expand_expr (value
, NULL_RTX
, VOIDmode
, 0);
4915 enum machine_mode value_mode
=
4916 TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
4917 ? TYPE_MODE (TREE_TYPE (value
))
4919 bitpos
= eltpos
* elt_size
;
4920 store_constructor_field (target
, bitsize
, bitpos
, value_mode
, value
,
4921 type
, cleared
, get_alias_set (elttype
));
4926 emit_insn (GEN_FCN (icode
) (target
,
4927 gen_rtx_PARALLEL (GET_MODE (target
),
4928 gen_rtvec_v (n_elts
, vector
))));
4931 /* Set constructor assignments. */
4932 else if (TREE_CODE (type
) == SET_TYPE
)
4934 tree elt
= CONSTRUCTOR_ELTS (exp
);
4935 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4936 tree domain
= TYPE_DOMAIN (type
);
4937 tree domain_min
, domain_max
, bitlength
;
4939 /* The default implementation strategy is to extract the constant
4940 parts of the constructor, use that to initialize the target,
4941 and then "or" in whatever non-constant ranges we need in addition.
4943 If a large set is all zero or all ones, it is
4944 probably better to set it using memset.
4945 Also, if a large set has just a single range, it may also be
4946 better to first clear the whole set (using
4947 memset) and then set the bits we want. */
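/* Illustrative note (an assumption about intent, not original text): for a
   set whose constructor is a single large range such as [8 .. 8000],
   building the constant words and or-ing them in would touch every word,
   so clearing the whole object with memset and then setting only the
   requested bits is usually cheaper.  */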
4949 /* Check for all zeros. */
4950 if (elt
== NULL_TREE
&& size
> 0)
4953 clear_storage (target
, GEN_INT (size
));
4957 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4958 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4959 bitlength
= size_binop (PLUS_EXPR
,
4960 size_diffop (domain_max
, domain_min
),
4963 nbits
= tree_low_cst (bitlength
, 1);
4965 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4966 are "complicated" (more than one range), initialize (the
4967 constant parts) by copying from a constant. */
4968 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4969 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4971 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4972 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4973 char *bit_buffer
= alloca (nbits
);
4974 HOST_WIDE_INT word
= 0;
4975 unsigned int bit_pos
= 0;
4976 unsigned int ibit
= 0;
4977 unsigned int offset
= 0; /* In bytes from beginning of set. */
4979 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4982 if (bit_buffer
[ibit
])
4984 if (BYTES_BIG_ENDIAN
)
4985 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4987 word
|= 1 << bit_pos
;
4991 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4993 if (word
!= 0 || ! cleared
)
4995 rtx datum
= gen_int_mode (word
, mode
);
4998 /* The assumption here is that it is safe to use
4999 XEXP if the set is multi-word, but not if
5000 it's single-word. */
5002 to_rtx
= adjust_address (target
, mode
, offset
);
5003 else if (offset
== 0)
5007 emit_move_insn (to_rtx
, datum
);
5014 offset
+= set_word_size
/ BITS_PER_UNIT
;
5019 /* Don't bother clearing storage if the set is all ones. */
5020 if (TREE_CHAIN (elt
) != NULL_TREE
5021 || (TREE_PURPOSE (elt
) == NULL_TREE
5023 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5024 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5025 || (tree_low_cst (TREE_VALUE (elt
), 0)
5026 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5027 != (HOST_WIDE_INT
) nbits
))))
5028 clear_storage (target
, expr_size (exp
));
5030 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5032 /* Start of range of element or NULL. */
5033 tree startbit
= TREE_PURPOSE (elt
);
5034 /* End of range of element, or element value. */
5035 tree endbit
= TREE_VALUE (elt
);
5036 HOST_WIDE_INT startb
, endb
;
5037 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5039 bitlength_rtx
= expand_expr (bitlength
,
5040 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5042 /* Handle non-range tuple element like [ expr ]. */
5043 if (startbit
== NULL_TREE
)
5045 startbit
= save_expr (endbit
);
5049 startbit
= convert (sizetype
, startbit
);
5050 endbit
= convert (sizetype
, endbit
);
5051 if (! integer_zerop (domain_min
))
5053 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5054 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5056 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5057 EXPAND_CONST_ADDRESS
);
5058 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5059 EXPAND_CONST_ADDRESS
);
5065 ((build_qualified_type (lang_hooks
.types
.type_for_mode
5066 (GET_MODE (target
), 0),
5069 emit_move_insn (targetx
, target
);
5072 else if (MEM_P (target
))
5077 /* Optimization: If startbit and endbit are constants divisible
5078 by BITS_PER_UNIT, call memset instead. */
5079 if (TREE_CODE (startbit
) == INTEGER_CST
5080 && TREE_CODE (endbit
) == INTEGER_CST
5081 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5082 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5084 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5086 plus_constant (XEXP (targetx
, 0),
5087 startb
/ BITS_PER_UNIT
),
5089 constm1_rtx
, TYPE_MODE (integer_type_node
),
5090 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5091 TYPE_MODE (sizetype
));
5094 emit_library_call (setbits_libfunc
, LCT_NORMAL
,
5095 VOIDmode
, 4, XEXP (targetx
, 0),
5096 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5097 startbit_rtx
, TYPE_MODE (sizetype
),
5098 endbit_rtx
, TYPE_MODE (sizetype
));
5101 emit_move_insn (target
, targetx
);
5109 /* Store the value of EXP (an expression tree)
5110 into a subfield of TARGET which has mode MODE and occupies
5111 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5112 If MODE is VOIDmode, it means that we are storing into a bit-field.
5114 If VALUE_MODE is VOIDmode, return nothing in particular.
5115 UNSIGNEDP is not used in this case.
5117 Otherwise, return an rtx for the value stored. This rtx
5118 has mode VALUE_MODE if that is convenient to do.
5119 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5121 TYPE is the type of the underlying object,
5123 ALIAS_SET is the alias set for the destination. This value will
5124 (in general) be different from that for TARGET, since TARGET is a
5125 reference to the containing structure. */
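/* Illustrative example (not part of the original comment): an assignment
   to a bit-field member such as "struct { int x : 5; } s; s.x = v;" would
   reach this routine with BITSIZE == 5, BITPOS giving the field's bit
   offset within S, and MODE == VOIDmode, which steers the store through
   store_bit_field below.  */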
5128 store_field (rtx target
, HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
5129 enum machine_mode mode
, tree exp
, enum machine_mode value_mode
,
5130 int unsignedp
, tree type
, int alias_set
)
5132 HOST_WIDE_INT width_mask
= 0;
5134 if (TREE_CODE (exp
) == ERROR_MARK
)
5137 /* If we have nothing to store, do nothing unless the expression has
5140 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5141 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5142 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5144 /* If we are storing into an unaligned field of an aligned union that is
5145 in a register, we may have the mode of TARGET being an integer mode but
5146 MODE == BLKmode. In that case, get an aligned object whose size and
5147 alignment are the same as TARGET and store TARGET into it (we can avoid
5148 the store if the field being stored is the entire width of TARGET). Then
5149 call ourselves recursively to store the field into a BLKmode version of
5150 that object. Finally, load from the object into TARGET. This is not
5151 very efficient in general, but should only be slightly more expensive
5152 than the otherwise-required unaligned accesses. Perhaps this can be
5153 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5154 twice, once with emit_move_insn and once via store_field. */
5157 && (REG_P (target
) || GET_CODE (target
) == SUBREG
))
5159 rtx object
= assign_temp (type
, 0, 1, 1);
5160 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5162 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5163 emit_move_insn (object
, target
);
5165 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5168 emit_move_insn (target
, object
);
5170 /* We want to return the BLKmode version of the data. */
5174 if (GET_CODE (target
) == CONCAT
)
5176 /* We're storing into a struct containing a single __complex. */
5180 return store_expr (exp
, target
, value_mode
!= VOIDmode
);
5183 /* If the structure is in a register or if the component
5184 is a bit field, we cannot use addressing to access it.
5185 Use bit-field techniques or SUBREG to store in it. */
5187 if (mode
== VOIDmode
5188 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5189 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5190 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5192 || GET_CODE (target
) == SUBREG
5193 /* If the field isn't aligned enough to store as an ordinary memref,
5194 store it as a bit field. */
5196 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5197 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5198 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5199 || (bitpos
% BITS_PER_UNIT
!= 0)))
5200 /* If the RHS and field are a constant size and the size of the
5201 RHS isn't the same size as the bitfield, we must use bitfield
5204 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5205 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5207 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5209 /* If BITSIZE is narrower than the size of the type of EXP
5210 we will be narrowing TEMP. Normally, what's wanted are the
5211 low-order bits. However, if EXP's type is a record and this is
5212 a big-endian machine, we want the upper BITSIZE bits. */
5213 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5214 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5215 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5216 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5217 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5221 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5223 if (mode
!= VOIDmode
&& mode
!= BLKmode
5224 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5225 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5227 /* If the modes of TARGET and TEMP are both BLKmode, both
5228 must be in memory and BITPOS must be aligned on a byte
5229 boundary. If so, we simply do a block copy. */
5230 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5232 if (!MEM_P (target
) || !MEM_P (temp
)
5233 || bitpos
% BITS_PER_UNIT
!= 0)
5236 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5237 emit_block_move (target
, temp
,
5238 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5242 return value_mode
== VOIDmode
? const0_rtx
: target
;
5245 /* Store the value in the bitfield. */
5246 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
);
5248 if (value_mode
!= VOIDmode
)
5250 /* The caller wants an rtx for the value.
5251 If possible, avoid refetching from the bitfield itself. */
5253 && ! (MEM_P (target
) && MEM_VOLATILE_P (target
)))
5256 enum machine_mode tmode
;
5258 tmode
= GET_MODE (temp
);
5259 if (tmode
== VOIDmode
)
5263 return expand_and (tmode
, temp
,
5264 gen_int_mode (width_mask
, tmode
),
5267 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5268 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5269 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5272 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5273 NULL_RTX
, value_mode
, VOIDmode
);
5279 rtx addr
= XEXP (target
, 0);
5280 rtx to_rtx
= target
;
5282 /* If a value is wanted, it must be the lhs;
5283 so make the address stable for multiple use. */
5285 if (value_mode
!= VOIDmode
&& !REG_P (addr
)
5286 && ! CONSTANT_ADDRESS_P (addr
)
5287 /* A frame-pointer reference is already stable. */
5288 && ! (GET_CODE (addr
) == PLUS
5289 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5290 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5291 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5292 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5294 /* Now build a reference to just the desired component. */
5296 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5298 if (to_rtx
== target
)
5299 to_rtx
= copy_rtx (to_rtx
);
5301 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5302 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5303 set_mem_alias_set (to_rtx
, alias_set
);
5305 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5309 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5310 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5311 codes and find the ultimate containing object, which we return.
5313 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5314 bit position, and *PUNSIGNEDP to the signedness of the field.
5315 If the position of the field is variable, we store a tree
5316 giving the variable offset (in units) in *POFFSET.
5317 This offset is in addition to the bit position.
5318 If the position is not variable, we store 0 in *POFFSET.
5320 If any of the extraction expressions is volatile,
5321 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5323 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5324 is a mode that can be used to access the field. In that case, *PBITSIZE
5327 If the field describes a variable-sized object, *PMODE is set to
5328 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5329 this case, but the address of the object can be found. */
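/* Illustrative example (an assumption, not from the original comment): for
   a reference such as s.arr[i].f, where F is a 3-bit field, this routine
   would return the containing object S, set *PBITSIZE to 3, put the
   constant part of the position in *PBITPOS, leave the variable part (a
   byte offset involving I) in *POFFSET, and set *PMODE to VOIDmode
   because F is a bit-field.  */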
5332 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5333 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5334 enum machine_mode
*pmode
, int *punsignedp
,
5338 enum machine_mode mode
= VOIDmode
;
5339 tree offset
= size_zero_node
;
5340 tree bit_offset
= bitsize_zero_node
;
5343 /* First get the mode, signedness, and size. We do this from just the
5344 outermost expression. */
5345 if (TREE_CODE (exp
) == COMPONENT_REF
)
5347 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5348 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5349 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5351 *punsignedp
= DECL_UNSIGNED (TREE_OPERAND (exp
, 1));
5353 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5355 size_tree
= TREE_OPERAND (exp
, 1);
5356 *punsignedp
= BIT_FIELD_REF_UNSIGNED (exp
);
5360 mode
= TYPE_MODE (TREE_TYPE (exp
));
5361 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
5363 if (mode
== BLKmode
)
5364 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5366 *pbitsize
= GET_MODE_BITSIZE (mode
);
5371 if (! host_integerp (size_tree
, 1))
5372 mode
= BLKmode
, *pbitsize
= -1;
5374 *pbitsize
= tree_low_cst (size_tree
, 1);
5377 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5378 and find the ultimate containing object. */
5381 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5382 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5383 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5385 tree field
= TREE_OPERAND (exp
, 1);
5386 tree this_offset
= component_ref_field_offset (exp
);
5388 /* If this field hasn't been filled in yet, don't go
5389 past it. This should only happen when folding expressions
5390 made during type construction. */
5391 if (this_offset
== 0)
5394 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5395 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5396 DECL_FIELD_BIT_OFFSET (field
));
5398 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5401 else if (TREE_CODE (exp
) == ARRAY_REF
5402 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5404 tree index
= TREE_OPERAND (exp
, 1);
5405 tree low_bound
= array_ref_low_bound (exp
);
5406 tree unit_size
= array_ref_element_size (exp
);
5408 /* We assume all arrays have sizes that are a multiple of a byte.
5409 First subtract the lower bound, if any, in the type of the
5410 index, then convert to sizetype and multiply by the size of the element. */
5412 if (! integer_zerop (low_bound
))
5413 index
= fold (build2 (MINUS_EXPR
, TREE_TYPE (index
),
5416 offset
= size_binop (PLUS_EXPR
, offset
,
5417 size_binop (MULT_EXPR
,
5418 convert (sizetype
, index
),
5422 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5423 conversions that don't change the mode, and all view conversions
5424 except those that need to "step up" the alignment. */
5425 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5426 && ! (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
5427 && ! ((TYPE_ALIGN (TREE_TYPE (exp
))
5428 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5430 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5431 < BIGGEST_ALIGNMENT
)
5432 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
5433 || TYPE_ALIGN_OK (TREE_TYPE
5434 (TREE_OPERAND (exp
, 0))))))
5435 && ! ((TREE_CODE (exp
) == NOP_EXPR
5436 || TREE_CODE (exp
) == CONVERT_EXPR
)
5437 && (TYPE_MODE (TREE_TYPE (exp
))
5438 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5441 /* If any reference in the chain is volatile, the effect is volatile. */
5442 if (TREE_THIS_VOLATILE (exp
))
5445 exp
= TREE_OPERAND (exp
, 0);
5448 /* If OFFSET is constant, see if we can return the whole thing as a
5449 constant bit position. Otherwise, split it up. */
5450 if (host_integerp (offset
, 0)
5451 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5453 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5454 && host_integerp (tem
, 0))
5455 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5457 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5463 /* Return a tree of sizetype representing the size, in bytes, of the element
5464 of EXP, an ARRAY_REF. */
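/* Example (illustrative): for an ARRAY_REF into "double a[n]" with no
   explicit size operand, this simply yields TYPE_SIZE_UNIT of double,
   typically a sizetype constant 8, after substituting any
   PLACEHOLDER_EXPR against EXP.  */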
5467 array_ref_element_size (tree exp
)
5469 tree aligned_size
= TREE_OPERAND (exp
, 3);
5470 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5472 /* If a size was specified in the ARRAY_REF, it's the size measured
5473 in alignment units of the element type. So multiply by that value. */
5475 return size_binop (MULT_EXPR
, aligned_size
,
5476 size_int (TYPE_ALIGN (elmt_type
) / BITS_PER_UNIT
));
5478 /* Otherwise, take the size from that of the element type. Substitute
5479 any PLACEHOLDER_EXPR that we have. */
5481 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type
), exp
);
5484 /* Return a tree representing the lower bound of the array mentioned in
5485 EXP, an ARRAY_REF. */
5488 array_ref_low_bound (tree exp
)
5490 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5492 /* If a lower bound is specified in EXP, use it. */
5493 if (TREE_OPERAND (exp
, 2))
5494 return TREE_OPERAND (exp
, 2);
5496 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5497 substituting for a PLACEHOLDER_EXPR as needed. */
5498 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
5499 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type
), exp
);
5501 /* Otherwise, return a zero of the appropriate type. */
5502 return fold_convert (TREE_TYPE (TREE_OPERAND (exp
, 1)), integer_zero_node
);
5505 /* Return a tree representing the upper bound of the array mentioned in
5506 EXP, an ARRAY_REF. */
5509 array_ref_up_bound (tree exp
)
5511 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5513 /* If there is a domain type and it has an upper bound, use it, substituting
5514 for a PLACEHOLDER_EXPR as needed. */
5515 if (domain_type
&& TYPE_MAX_VALUE (domain_type
))
5516 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type
), exp
);
5518 /* Otherwise fail. */
5522 /* Return a tree representing the offset, in bytes, of the field referenced
5523 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5526 component_ref_field_offset (tree exp
)
5528 tree aligned_offset
= TREE_OPERAND (exp
, 2);
5529 tree field
= TREE_OPERAND (exp
, 1);
5531 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5532 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5535 return size_binop (MULT_EXPR
, aligned_offset
,
5536 size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
));
5538 /* Otherwise, take the offset from that of the field. Substitute
5539 any PLACEHOLDER_EXPR that we have. */
5541 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field
), exp
);
5544 /* Return 1 if T is an expression that get_inner_reference handles. */
5547 handled_component_p (tree t
)
5549 switch (TREE_CODE (t
))
5554 case ARRAY_RANGE_REF
:
5555 case NON_LVALUE_EXPR
:
5556 case VIEW_CONVERT_EXPR
:
5559 /* ??? Sure they are handled, but get_inner_reference may return
5560 a different PBITSIZE, depending upon whether the expression is
5561 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5564 return (TYPE_MODE (TREE_TYPE (t
))
5565 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
5572 /* Given an rtx VALUE that may contain additions and multiplications, return
5573 an equivalent value that just refers to a register, memory, or constant.
5574 This is done by generating instructions to perform the arithmetic and
5575 returning a pseudo-register containing the value.
5577 The returned value may be a REG, SUBREG, MEM or constant. */
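/* Illustrative sketch (not original text): given
   VALUE == (plus (mult (reg i) (const_int 4)) (reg base)),
   force_operand emits the multiply and the add and hands back the pseudo
   holding the sum, so callers may rely on the result being a REG, SUBREG,
   MEM or constant as stated above.  */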
5580 force_operand (rtx value
, rtx target
)
5583 /* Use subtarget as the target for operand 0 of a binary operation. */
5584 rtx subtarget
= get_subtarget (target
);
5585 enum rtx_code code
= GET_CODE (value
);
5587 /* Check for subreg applied to an expression produced by loop optimizer. */
5589 && !REG_P (SUBREG_REG (value
))
5590 && !MEM_P (SUBREG_REG (value
)))
5592 value
= simplify_gen_subreg (GET_MODE (value
),
5593 force_reg (GET_MODE (SUBREG_REG (value
)),
5594 force_operand (SUBREG_REG (value
),
5596 GET_MODE (SUBREG_REG (value
)),
5597 SUBREG_BYTE (value
));
5598 code
= GET_CODE (value
);
5601 /* Check for a PIC address load. */
5602 if ((code
== PLUS
|| code
== MINUS
)
5603 && XEXP (value
, 0) == pic_offset_table_rtx
5604 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5605 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5606 || GET_CODE (XEXP (value
, 1)) == CONST
))
5609 subtarget
= gen_reg_rtx (GET_MODE (value
));
5610 emit_move_insn (subtarget
, value
);
5614 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5617 target
= gen_reg_rtx (GET_MODE (value
));
5618 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5619 code
== ZERO_EXTEND
);
5623 if (ARITHMETIC_P (value
))
5625 op2
= XEXP (value
, 1);
5626 if (!CONSTANT_P (op2
) && !(REG_P (op2
) && op2
!= subtarget
))
5628 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5631 op2
= negate_rtx (GET_MODE (value
), op2
);
5634 /* Check for an addition with OP2 a constant integer and our first
5635 operand a PLUS of a virtual register and something else. In that
5636 case, we want to emit the sum of the virtual register and the
5637 constant first and then add the other value. This allows virtual
5638 register instantiation to simply modify the constant rather than
5639 creating another one around this addition. */
5640 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5641 && GET_CODE (XEXP (value
, 0)) == PLUS
5642 && REG_P (XEXP (XEXP (value
, 0), 0))
5643 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5644 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5646 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5647 XEXP (XEXP (value
, 0), 0), op2
,
5648 subtarget
, 0, OPTAB_LIB_WIDEN
);
5649 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5650 force_operand (XEXP (XEXP (value
,
5652 target
, 0, OPTAB_LIB_WIDEN
);
5655 op1
= force_operand (XEXP (value
, 0), subtarget
);
5656 op2
= force_operand (op2
, NULL_RTX
);
5660 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5662 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5663 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5664 target
, 1, OPTAB_LIB_WIDEN
);
5666 return expand_divmod (0,
5667 FLOAT_MODE_P (GET_MODE (value
))
5668 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5669 GET_MODE (value
), op1
, op2
, target
, 0);
5672 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5676 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5680 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5684 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5685 target
, 0, OPTAB_LIB_WIDEN
);
5688 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5689 target
, 1, OPTAB_LIB_WIDEN
);
5692 if (UNARY_P (value
))
5694 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5695 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5698 #ifdef INSN_SCHEDULING
5699 /* On machines that have insn scheduling, we want all memory references to be
5700 explicit, so we need to deal with such paradoxical SUBREGs. */
5701 if (GET_CODE (value
) == SUBREG
&& MEM_P (SUBREG_REG (value
))
5702 && (GET_MODE_SIZE (GET_MODE (value
))
5703 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5705 = simplify_gen_subreg (GET_MODE (value
),
5706 force_reg (GET_MODE (SUBREG_REG (value
)),
5707 force_operand (SUBREG_REG (value
),
5709 GET_MODE (SUBREG_REG (value
)),
5710 SUBREG_BYTE (value
));
5716 /* Subroutine of expand_expr: return nonzero iff there is no way that
5717 EXP can reference X, which is being modified. TOP_P is nonzero if this
5718 call is going to be used to determine whether we need a temporary
5719 for EXP, as opposed to a recursive call to this function.
5721 It is always safe for this routine to return zero since it merely
5722 searches for optimization opportunities. */
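/* Example (illustrative): if X is the pseudo that will hold the variable
   "a" and EXP is the expression "b + 1", nothing in EXP can refer to X,
   so this routine may return nonzero and expand_expr can compute EXP
   directly into X; returning zero would merely force a temporary.  */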
5725 safe_from_p (rtx x
, tree exp
, int top_p
)
5731 /* If EXP has varying size, we MUST use a target since we currently
5732 have no way of allocating temporaries of variable size
5733 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5734 So we assume here that something at a higher level has prevented a
5735 clash. This is somewhat bogus, but the best we can do. Only
5736 do this when X is BLKmode and when we are at the top level. */
5737 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5738 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5739 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5740 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5741 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5743 && GET_MODE (x
) == BLKmode
)
5744 /* If X is in the outgoing argument area, it is always safe. */
5746 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5747 || (GET_CODE (XEXP (x
, 0)) == PLUS
5748 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5751 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5752 find the underlying pseudo. */
5753 if (GET_CODE (x
) == SUBREG
)
5756 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5760 /* Now look at our tree code and possibly recurse. */
5761 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5764 exp_rtl
= DECL_RTL_IF_SET (exp
);
5771 if (TREE_CODE (exp
) == TREE_LIST
)
5775 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5777 exp
= TREE_CHAIN (exp
);
5780 if (TREE_CODE (exp
) != TREE_LIST
)
5781 return safe_from_p (x
, exp
, 0);
5784 else if (TREE_CODE (exp
) == ERROR_MARK
)
5785 return 1; /* An already-visited SAVE_EXPR? */
5790 /* The only case we look at here is the DECL_INITIAL inside a
5792 return (TREE_CODE (exp
) != DECL_EXPR
5793 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
5794 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
5795 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
5799 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
5804 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5808 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5809 the expression. If it is set, we conflict iff we are that rtx or
5810 both are in memory. Otherwise, we check all operands of the
5811 expression recursively. */
5813 switch (TREE_CODE (exp
))
5816 /* If the operand is static or we are static, we can't conflict.
5817 Likewise if we don't conflict with the operand at all. */
5818 if (staticp (TREE_OPERAND (exp
, 0))
5819 || TREE_STATIC (exp
)
5820 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5823 /* Otherwise, the only way this can conflict is if we are taking
5824 the address of a DECL whose address is part of X, which is very rare. */
5826 exp
= TREE_OPERAND (exp
, 0);
5829 if (!DECL_RTL_SET_P (exp
)
5830 || !MEM_P (DECL_RTL (exp
)))
5833 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5839 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5840 get_alias_set (exp
)))
5845 /* Assume that the call will clobber all hard registers and
5847 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5852 case WITH_CLEANUP_EXPR
:
5853 case CLEANUP_POINT_EXPR
:
5854 /* Lowered by gimplify.c. */
5858 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5864 /* If we have an rtx, we do not need to scan our operands. */
5868 nops
= first_rtl_op (TREE_CODE (exp
));
5869 for (i
= 0; i
< nops
; i
++)
5870 if (TREE_OPERAND (exp
, i
) != 0
5871 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5874 /* If this is a language-specific tree code, it may require
5875 special handling. */
5876 if ((unsigned int) TREE_CODE (exp
)
5877 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5878 && !lang_hooks
.safe_from_p (x
, exp
))
5882 /* If we have an rtl, find any enclosed object. Then see if we conflict
5886 if (GET_CODE (exp_rtl
) == SUBREG
)
5888 exp_rtl
= SUBREG_REG (exp_rtl
);
5890 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5894 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5895 are memory and they conflict. */
5896 return ! (rtx_equal_p (x
, exp_rtl
)
5897 || (MEM_P (x
) && MEM_P (exp_rtl
)
5898 && true_dependence (exp_rtl
, VOIDmode
, x
,
5899 rtx_addr_varies_p
)));
5902 /* If we reach here, it is safe. */
5907 /* Return the highest power of two that EXP is known to be a multiple of.
5908 This is used in updating alignment of MEMs in array references. */
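/* Worked example (illustrative, based on the cases below): for a bare
   INTEGER_CST 24 the lowest set bit gives 8; for "i * 12" the factors of
   the operands multiply, so at least 1 * 4 == 4 is returned and the MEM
   can be marked as 4-byte aligned.  */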
5910 static unsigned HOST_WIDE_INT
5911 highest_pow2_factor (tree exp
)
5913 unsigned HOST_WIDE_INT c0
, c1
;
5915 switch (TREE_CODE (exp
))
5918 /* We can find the lowest bit that's a one. If the low
5919 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5920 We need to handle this case since we can find it in a COND_EXPR,
5921 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5922 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
5924 if (TREE_CONSTANT_OVERFLOW (exp
))
5925 return BIGGEST_ALIGNMENT
;
5928 /* Note: tree_low_cst is intentionally not used here,
5929 we don't care about the upper bits. */
5930 c0
= TREE_INT_CST_LOW (exp
);
5932 return c0
? c0
: BIGGEST_ALIGNMENT
;
5936 case PLUS_EXPR
: case MINUS_EXPR
: case MIN_EXPR
: case MAX_EXPR
:
5937 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
5938 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
5939 return MIN (c0
, c1
);
5942 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
5943 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
5946 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5948 if (integer_pow2p (TREE_OPERAND (exp
, 1))
5949 && host_integerp (TREE_OPERAND (exp
, 1), 1))
5951 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
5952 c1
= tree_low_cst (TREE_OPERAND (exp
, 1), 1);
5953 return MAX (1, c0
/ c1
);
5957 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
5959 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
5962 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
5965 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
5966 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
5967 return MIN (c0
, c1
);
5976 /* Similar, except that the alignment requirements of TARGET are
5977 taken into account. Assume it is at least as aligned as its
5978 type, unless it is a COMPONENT_REF in which case the layout of
5979 the structure gives the alignment. */
5981 static unsigned HOST_WIDE_INT
5982 highest_pow2_factor_for_target (tree target
, tree exp
)
5984 unsigned HOST_WIDE_INT target_align
, factor
;
5986 factor = highest_pow2_factor (exp);
5987 if (TREE_CODE (target) == COMPONENT_REF)
5988 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
else
5990 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
5991 return MAX (factor, target_align);
5994 /* Expands variable VAR. */
5997 expand_var (tree var
)
5999 if (DECL_EXTERNAL (var
))
6002 if (TREE_STATIC (var
))
6003 /* If this is an inlined copy of a static local variable,
6004 look up the original decl. */
6005 var
= DECL_ORIGIN (var
);
6007 if (TREE_STATIC (var
)
6008 ? !TREE_ASM_WRITTEN (var
)
6009 : !DECL_RTL_SET_P (var
))
6011 if (TREE_CODE (var
) == VAR_DECL
&& DECL_VALUE_EXPR (var
))
6012 /* Should be ignored. */;
6013 else if (lang_hooks
.expand_decl (var
))
6015 else if (TREE_CODE (var
) == VAR_DECL
&& !TREE_STATIC (var
))
6017 else if (TREE_CODE (var
) == VAR_DECL
&& TREE_STATIC (var
))
6018 rest_of_decl_compilation (var
, 0, 0);
6019 else if (TREE_CODE (var
) == TYPE_DECL
6020 || TREE_CODE (var
) == CONST_DECL
6021 || TREE_CODE (var
) == FUNCTION_DECL
6022 || TREE_CODE (var
) == LABEL_DECL
)
6023 /* No expansion needed. */;
6029 /* Subroutine of expand_expr. Expand the two operands of a binary
6030 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6031 The value may be stored in TARGET if TARGET is nonzero. The
6032 MODIFIER argument is as documented by expand_expr. */
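/* Usage note (illustrative, not original text): a typical caller expands a
   binary expression with
     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, modifier);
   the helper avoids reusing TARGET when operand 1 might depend on it, and
   wraps operand 0 in a SAVE_EXPR when a fixed evaluation order is required
   and operand 1 has side effects.  */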
6035 expand_operands (tree exp0
, tree exp1
, rtx target
, rtx
*op0
, rtx
*op1
,
6036 enum expand_modifier modifier
)
6038 if (! safe_from_p (target
, exp1
, 1))
6040 if (operand_equal_p (exp0
, exp1
, 0))
6042 *op0
= expand_expr (exp0
, target
, VOIDmode
, modifier
);
6043 *op1
= copy_rtx (*op0
);
6047 /* If we need to preserve evaluation order, copy exp0 into its own
6048 temporary variable so that it can't be clobbered by exp1. */
6049 if (flag_evaluation_order
&& TREE_SIDE_EFFECTS (exp1
))
6050 exp0
= save_expr (exp0
);
6051 *op0
= expand_expr (exp0
, target
, VOIDmode
, modifier
);
6052 *op1
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, modifier
);
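
/* Example (illustrative sketch only): a typical binary-operator case in
   expand_expr_real_1 below uses this helper as

       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			subtarget, &op0, &op1, 0);

   after which OP0 and OP1 hold the expanded operands, ready for
   expand_binop or simplify_gen_binary.  */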
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on.  */
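
/* Calling-convention sketch (illustrative, not itself part of the
   interface above): a caller that only wants side effects passes
   const0_rtx as TARGET, while address arithmetic for a static
   initializer is expanded with something like

       rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

   and the caller must then accept X being a (PLUS ...)/(MULT ...) nest
   or a MEM with a not-yet-legitimized constant address, as described
   above.  */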
static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }
  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    {
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
    }
  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						REG_NOTES (insn));
	}
    }

  return ret;
}
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  if (lang_hooks.reduce_bit_field_operations
      && TREE_CODE (type) == INTEGER_TYPE
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
    {
      /* An operation in what may be a bit-field type needs the
	 result to be reduced to the precision of the bit-field type,
	 which is narrower than that of the type's mode.  */
      reduce_bit_field = true;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
    }
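
  /* Concrete example (illustrative only): for a C bit-field declared
     `unsigned f : 3', TYPE_MODE is a full integer mode such as SImode
     while TYPE_PRECISION is 3, so after e.g. an addition the
     REDUCE_BIT_FIELD wrapper above masks the result back down to
     3 bits.  */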
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == COND_EXPR
		 || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6233 if (! TREE_SIDE_EFFECTS (exp
))
6236 /* Ensure we reference a volatile object even if value is ignored, but
6237 don't do this if all we are doing is taking its address. */
6238 if (TREE_THIS_VOLATILE (exp
)
6239 && TREE_CODE (exp
) != FUNCTION_DECL
6240 && mode
!= VOIDmode
&& mode
!= BLKmode
6241 && modifier
!= EXPAND_CONST_ADDRESS
)
6243 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
6245 temp
= copy_to_reg (temp
);
6249 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6250 || code
== INDIRECT_REF
)
6251 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6254 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6255 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6257 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6258 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6261 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6262 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
6265 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6267 else if (code
== BIT_FIELD_REF
)
6269 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6270 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6271 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
6278 /* If will do cse, generate all results into pseudo registers
6279 since 1) that allows cse to find more things
6280 and 2) otherwise cse could produce an insn the machine
6281 cannot support. An exception is a CONSTRUCTOR into a multi-word
6282 MEM: that's much more likely to be most efficient into the MEM.
6283 Another is a CALL_EXPR which must return in memory. */
6285 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6286 && (!REG_P (target
) || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
6287 && ! (code
== CONSTRUCTOR
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6288 && ! (code
== CALL_EXPR
&& aggregate_value_p (exp
, exp
)))
6295 tree function
= decl_function_context (exp
);
6297 temp
= label_rtx (exp
);
6298 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
6300 if (function
!= current_function_decl
6302 LABEL_REF_NONLOCAL_P (temp
) = 1;
6304 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
6310 /* If a static var's type was incomplete when the decl was written,
6311 but the type is complete now, lay out the decl now. */
6312 if (DECL_SIZE (exp
) == 0
6313 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
6314 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6315 layout_decl (exp
, 0);
6317 /* ... fall through ... */
6321 if (DECL_RTL (exp
) == 0)
      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}
6333 /* Show we haven't gotten RTL for this yet. */
6336 /* Variables inherited from containing functions should have
6337 been lowered by this point. */
6338 context
= decl_function_context (exp
);
6340 && context
!= current_function_decl
6341 && !TREE_STATIC (exp
)
6342 /* ??? C++ creates functions that are not TREE_STATIC. */
6343 && TREE_CODE (exp
) != FUNCTION_DECL
)
6346 /* This is the case of an array whose size is to be determined
6347 from its initializer, while the initializer is still being parsed.
6350 else if (MEM_P (DECL_RTL (exp
))
6351 && REG_P (XEXP (DECL_RTL (exp
), 0)))
6352 temp
= validize_mem (DECL_RTL (exp
));
6354 /* If DECL_RTL is memory, we are in the normal case and either
6355 the address is not valid or it is not a register and -fforce-addr
6356 is specified, get the address into a register. */
6358 else if (MEM_P (DECL_RTL (exp
))
6359 && modifier
!= EXPAND_CONST_ADDRESS
6360 && modifier
!= EXPAND_SUM
6361 && modifier
!= EXPAND_INITIALIZER
6362 && (! memory_address_p (DECL_MODE (exp
),
6363 XEXP (DECL_RTL (exp
), 0))
6365 && !REG_P (XEXP (DECL_RTL (exp
), 0)))))
6368 *alt_rtl
= DECL_RTL (exp
);
6369 temp
= replace_equiv_address (DECL_RTL (exp
),
6370 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6373 /* If we got something, return it. But first, set the alignment
6374 if the address is a register. */
6377 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
6378 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6383 /* If the mode of DECL_RTL does not match that of the decl, it
6384 must be a promoted value. We return a SUBREG of the wanted mode,
6385 but mark it so that we know that it was already extended. */
6387 if (REG_P (DECL_RTL (exp
))
6388 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6390 /* Get the signedness used for this variable. Ensure we get the
6391 same mode we got when the variable was declared. */
6392 if (GET_MODE (DECL_RTL (exp
))
6393 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6394 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6397 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6398 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6399 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6403 return DECL_RTL (exp
);
6406 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6407 TREE_INT_CST_HIGH (exp
), mode
);
6409 /* ??? If overflow is set, fold will have done an incomplete job,
6410 which can result in (plus xx (const_int 0)), which can get
6411 simplified by validate_replace_rtx during virtual register
6412 instantiation, which can result in unrecognizable insns.
6413 Avoid this by forcing all overflows into registers. */
6414 if (TREE_CONSTANT_OVERFLOW (exp
)
6415 && modifier
!= EXPAND_INITIALIZER
)
6416 temp
= force_reg (mode
, temp
);
6421 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp
))) == MODE_VECTOR_INT
6422 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp
))) == MODE_VECTOR_FLOAT
)
6423 return const_vector_from_tree (exp
);
6425 return expand_expr (build1 (CONSTRUCTOR
, TREE_TYPE (exp
),
6426 TREE_VECTOR_CST_ELTS (exp
)),
6427 ignore
? const0_rtx
: target
, tmode
, modifier
);
6430 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
6433 /* If optimized, generate immediate CONST_DOUBLE
6434 which will be turned into memory by reload if necessary.
6436 We used to force a register so that loop.c could see it. But
6437 this does not allow gen_* patterns to perform optimizations with
6438 the constants. It also produces two insns in cases like "x = 1.0;".
6439 On most machines, floating-point constants are not permitted in
6440 many insns, so we'd end up copying it to a register in any case.
6442 Now, we do the copying in expand_binop, if appropriate. */
6443 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6444 TYPE_MODE (TREE_TYPE (exp
)));
6447 /* Handle evaluating a complex constant in a CONCAT target. */
6448 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
6450 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
6453 rtarg
= XEXP (original_target
, 0);
6454 itarg
= XEXP (original_target
, 1);
6456 /* Move the real and imaginary parts separately. */
6457 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, 0);
6458 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, 0);
6461 emit_move_insn (rtarg
, op0
);
6463 emit_move_insn (itarg
, op1
);
6465 return original_target
;
6468 /* ... fall through ... */
6471 temp
= output_constant_def (exp
, 1);
6473 /* temp contains a constant address.
6474 On RISC machines where a constant address isn't valid,
6475 make some insns to get that address into a register. */
6476 if (modifier
!= EXPAND_CONST_ADDRESS
6477 && modifier
!= EXPAND_INITIALIZER
6478 && modifier
!= EXPAND_SUM
6479 && (! memory_address_p (mode
, XEXP (temp
, 0))
6480 || flag_force_addr
))
6481 return replace_equiv_address (temp
,
6482 copy_rtx (XEXP (temp
, 0)));
6487 tree val
= TREE_OPERAND (exp
, 0);
6488 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
);
6490 if (TREE_CODE (val
) != VAR_DECL
|| !DECL_ARTIFICIAL (val
))
6492 /* We can indeed still hit this case, typically via builtin
6493 expanders calling save_expr immediately before expanding
6494 something. Assume this means that we only have to deal
6495 with non-BLKmode values. */
6496 if (GET_MODE (ret
) == BLKmode
)
6499 val
= build_decl (VAR_DECL
, NULL
, TREE_TYPE (exp
));
6500 DECL_ARTIFICIAL (val
) = 1;
6501 TREE_OPERAND (exp
, 0) = val
;
6503 if (!CONSTANT_P (ret
))
6504 ret
= copy_to_reg (ret
);
6505 SET_DECL_RTL (val
, ret
);
6512 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6513 expand_goto (TREE_OPERAND (exp
, 0));
6515 expand_computed_goto (TREE_OPERAND (exp
, 0));
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
6525 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6526 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6531 /* All elts simple constants => refer to a constant in memory. But
6532 if this is a non-BLKmode mode, let it store a field at a time
6533 since that should make a CONST_INT or CONST_DOUBLE when we
6534 fold. Likewise, if we have a target we can use, it is best to
6535 store directly into the target unless the type is large enough
6536 that memcpy will be used. If we are making an initializer and
6537 all operands are constant, put it in memory as well.
6539 FIXME: Avoid trying to fill vector constructors piece-meal.
6540 Output them with output_constant_def below unless we're sure
6541 they're zeros. This should go away when vector initializers
6542 are treated like VECTOR_CST instead of arrays.
6544 else if ((TREE_STATIC (exp
)
6545 && ((mode
== BLKmode
6546 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6547 || TREE_ADDRESSABLE (exp
)
6548 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6549 && (! MOVE_BY_PIECES_P
6550 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6552 && ! mostly_zeros_p (exp
))))
6553 || ((modifier
== EXPAND_INITIALIZER
6554 || modifier
== EXPAND_CONST_ADDRESS
)
6555 && TREE_CONSTANT (exp
)))
6557 rtx constructor
= output_constant_def (exp
, 1);
6559 if (modifier
!= EXPAND_CONST_ADDRESS
6560 && modifier
!= EXPAND_INITIALIZER
6561 && modifier
!= EXPAND_SUM
)
6562 constructor
= validize_mem (constructor
);
6568 /* Handle calls that pass values in multiple non-contiguous
6569 locations. The Irix 6 ABI has examples of this. */
6570 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6571 || GET_CODE (target
) == PARALLEL
6572 || modifier
== EXPAND_STACK_PARM
)
6574 = assign_temp (build_qualified_type (type
,
6576 | (TREE_READONLY (exp
)
6577 * TYPE_QUAL_CONST
))),
6578 0, TREE_ADDRESSABLE (exp
), 1);
6580 store_constructor (exp
, target
, 0, int_expr_size (exp
));
6586 tree exp1
= TREE_OPERAND (exp
, 0);
6588 if (modifier
!= EXPAND_WRITE
)
6592 t
= fold_read_from_constant_string (exp
);
6594 return expand_expr (t
, target
, tmode
, modifier
);
6597 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6598 op0
= memory_address (mode
, op0
);
6599 temp
= gen_rtx_MEM (mode
, op0
);
6600 set_mem_attributes (temp
, exp
, 0);
6602 /* If we are writing to this object and its type is a record with
6603 readonly fields, we must mark it as readonly so it will
6604 conflict with readonly references to those fields. */
6605 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
6606 RTX_UNCHANGING_P (temp
) = 1;
6613 #ifdef ENABLE_CHECKING
6614 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6619 tree array
= TREE_OPERAND (exp
, 0);
6620 tree low_bound
= array_ref_low_bound (exp
);
6621 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6624 /* Optimize the special-case of a zero lower bound.
6626 We convert the low_bound to sizetype to avoid some problems
6627 with constant folding. (E.g. suppose the lower bound is 1,
6628 and its mode is QI. Without the conversion, (ARRAY
6629 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6630 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6632 if (! integer_zerop (low_bound
))
6633 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6635 /* Fold an expression like: "foo"[2].
6636 This is not done in fold so it won't happen inside &.
6637 Don't fold if this is for wide characters since it's too
6638 difficult to do correctly and this is a very rare case. */
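
	/* For instance (illustrative), the C expression "foo"[2] is read
	   directly out of the STRING_CST by fold_read_from_constant_string
	   and expands to the character constant 'o'; no memory reference
	   is emitted.  */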
6640 if (modifier
!= EXPAND_CONST_ADDRESS
6641 && modifier
!= EXPAND_INITIALIZER
6642 && modifier
!= EXPAND_MEMORY
)
6644 tree t
= fold_read_from_constant_string (exp
);
6647 return expand_expr (t
, target
, tmode
, modifier
);
6650 /* If this is a constant index into a constant array,
6651 just get the value from the array. Handle both the cases when
6652 we have an explicit constructor and when our operand is a variable
6653 that was declared const. */
6655 if (modifier
!= EXPAND_CONST_ADDRESS
6656 && modifier
!= EXPAND_INITIALIZER
6657 && modifier
!= EXPAND_MEMORY
6658 && TREE_CODE (array
) == CONSTRUCTOR
6659 && ! TREE_SIDE_EFFECTS (array
)
6660 && TREE_CODE (index
) == INTEGER_CST
6661 && 0 > compare_tree_int (index
,
6662 list_length (CONSTRUCTOR_ELTS
6663 (TREE_OPERAND (exp
, 0)))))
6667 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6668 i
= TREE_INT_CST_LOW (index
);
6669 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6673 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
6677 else if (optimize
>= 1
6678 && modifier
!= EXPAND_CONST_ADDRESS
6679 && modifier
!= EXPAND_INITIALIZER
6680 && modifier
!= EXPAND_MEMORY
6681 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6682 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6683 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
6684 && targetm
.binds_local_p (array
))
6686 if (TREE_CODE (index
) == INTEGER_CST
)
6688 tree init
= DECL_INITIAL (array
);
6690 if (TREE_CODE (init
) == CONSTRUCTOR
)
6694 for (elem
= CONSTRUCTOR_ELTS (init
);
6696 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6697 elem
= TREE_CHAIN (elem
))
6700 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6701 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6704 else if (TREE_CODE (init
) == STRING_CST
6705 && 0 > compare_tree_int (index
,
6706 TREE_STRING_LENGTH (init
)))
6708 tree type
= TREE_TYPE (TREE_TYPE (init
));
6709 enum machine_mode mode
= TYPE_MODE (type
);
6711 if (GET_MODE_CLASS (mode
) == MODE_INT
6712 && GET_MODE_SIZE (mode
) == 1)
6713 return gen_int_mode (TREE_STRING_POINTER (init
)
6714 [TREE_INT_CST_LOW (index
)], mode
);
6719 goto normal_inner_ref
;
6722 /* If the operand is a CONSTRUCTOR, we can just extract the
6723 appropriate field if it is present. */
6724 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
6728 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6729 elt
= TREE_CHAIN (elt
))
6730 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6731 /* We can normally use the value of the field in the
6732 CONSTRUCTOR. However, if this is a bitfield in
6733 an integral mode that we can fit in a HOST_WIDE_INT,
6734 we must mask only the number of bits in the bitfield,
6735 since this is done implicitly by the constructor. If
6736 the bitfield does not meet either of those conditions,
6737 we can't do this optimization. */
6738 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6739 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6741 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6742 <= HOST_BITS_PER_WIDE_INT
))))
6744 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6745 && modifier
== EXPAND_STACK_PARM
)
6747 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6748 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6750 HOST_WIDE_INT bitsize
6751 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6752 enum machine_mode imode
6753 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6755 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6757 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6758 op0
= expand_and (imode
, op0
, op1
, target
);
6763 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6766 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6768 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6776 goto normal_inner_ref
;
6779 case ARRAY_RANGE_REF
:
6782 enum machine_mode mode1
;
6783 HOST_WIDE_INT bitsize
, bitpos
;
6786 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6787 &mode1
, &unsignedp
, &volatilep
);
6790 /* If we got back the original object, something is wrong. Perhaps
6791 we are evaluating an expression too early. In any event, don't
6792 infinitely recurse. */
      /* If TEM's type is a union of variable size, pass TARGET to the inner
	 computation, since it will need a temporary and TARGET is known
	 to safely serve as one.  This occurs in unchecked conversion in Ada.  */
6802 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
6803 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
6805 && modifier
!= EXPAND_STACK_PARM
6806 ? target
: NULL_RTX
),
6808 (modifier
== EXPAND_INITIALIZER
6809 || modifier
== EXPAND_CONST_ADDRESS
6810 || modifier
== EXPAND_STACK_PARM
)
6811 ? modifier
: EXPAND_NORMAL
);
6813 /* If this is a constant, put it into a register if it is a
6814 legitimate constant and OFFSET is 0 and memory if it isn't. */
6815 if (CONSTANT_P (op0
))
6817 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
6818 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
6820 op0
= force_reg (mode
, op0
);
6822 op0
= validize_mem (force_const_mem (mode
, op0
));
6825 /* Otherwise, if this object not in memory and we either have an
6826 offset or a BLKmode result, put it there. This case can't occur in
6827 C, but can in Ada if we have unchecked conversion of an expression
6828 from a scalar type to an array or record type or for an
6829 ARRAY_RANGE_REF whose type is BLKmode. */
6830 else if (!MEM_P (op0
)
6832 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
6834 tree nt
= build_qualified_type (TREE_TYPE (tem
),
6835 (TYPE_QUALS (TREE_TYPE (tem
))
6836 | TYPE_QUAL_CONST
));
6837 rtx memloc
= assign_temp (nt
, 1, 1, 1);
6839 emit_move_insn (memloc
, op0
);
6845 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
6851 #ifdef POINTERS_EXTEND_UNSIGNED
6852 if (GET_MODE (offset_rtx
) != Pmode
)
6853 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
6855 if (GET_MODE (offset_rtx
) != ptr_mode
)
6856 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
6859 if (GET_MODE (op0
) == BLKmode
6860 /* A constant address in OP0 can have VOIDmode, we must
6861 not try to call force_reg in that case. */
6862 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
6864 && (bitpos
% bitsize
) == 0
6865 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
6866 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
6868 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
6872 op0
= offset_address (op0
, offset_rtx
,
6873 highest_pow2_factor (offset
));
6876 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6877 record its alignment as BIGGEST_ALIGNMENT. */
6878 if (MEM_P (op0
) && bitpos
== 0 && offset
!= 0
6879 && is_aligning_offset (offset
, tem
))
6880 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
6882 /* Don't forget about volatility even if this is a bitfield. */
6883 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
6885 if (op0
== orig_op0
)
6886 op0
= copy_rtx (op0
);
6888 MEM_VOLATILE_P (op0
) = 1;
6891 /* The following code doesn't handle CONCAT.
6892 Assume only bitpos == 0 can be used for CONCAT, due to
6893 one element arrays having the same mode as its element. */
6894 if (GET_CODE (op0
) == CONCAT
)
6896 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
6901 /* In cases where an aligned union has an unaligned object
6902 as a field, we might be extracting a BLKmode value from
6903 an integer-mode (e.g., SImode) object. Handle this case
6904 by doing the extract into an object as wide as the field
6905 (which we know to be the width of a basic mode), then
6906 storing into memory, and changing the mode to BLKmode. */
6907 if (mode1
== VOIDmode
6908 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
6909 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
6910 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6911 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
6912 && modifier
!= EXPAND_CONST_ADDRESS
6913 && modifier
!= EXPAND_INITIALIZER
)
6914 /* If the field isn't aligned enough to fetch as a memref,
6915 fetch it as a bit field. */
6916 || (mode1
!= BLKmode
6917 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
6918 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
6920 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
6921 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
6922 && ((modifier
== EXPAND_CONST_ADDRESS
6923 || modifier
== EXPAND_INITIALIZER
)
6925 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
6926 || (bitpos
% BITS_PER_UNIT
!= 0)))
6927 /* If the type and the field are a constant size and the
6928 size of the type isn't the same size as the bitfield,
6929 we must use bitfield operations. */
6931 && TYPE_SIZE (TREE_TYPE (exp
))
6932 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
6933 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
6936 enum machine_mode ext_mode
= mode
;
6938 if (ext_mode
== BLKmode
6939 && ! (target
!= 0 && MEM_P (op0
)
6941 && bitpos
% BITS_PER_UNIT
== 0))
6942 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
6944 if (ext_mode
== BLKmode
)
6947 target
= assign_temp (type
, 0, 1, 1);
6952 /* In this case, BITPOS must start at a byte boundary and
6953 TARGET, if specified, must be a MEM. */
6955 || (target
!= 0 && !MEM_P (target
))
6956 || bitpos
% BITS_PER_UNIT
!= 0)
6959 emit_block_move (target
,
6960 adjust_address (op0
, VOIDmode
,
6961 bitpos
/ BITS_PER_UNIT
),
6962 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6964 (modifier
== EXPAND_STACK_PARM
6965 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
6970 op0
= validize_mem (op0
);
6972 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
6973 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
6975 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
6976 (modifier
== EXPAND_STACK_PARM
6977 ? NULL_RTX
: target
),
6978 ext_mode
, ext_mode
);
6980 /* If the result is a record type and BITSIZE is narrower than
6981 the mode of OP0, an integral mode, and this is a big endian
6982 machine, we must put the field into the high-order bits. */
6983 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
6984 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
6985 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
6986 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
6987 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
6991 /* If the result type is BLKmode, store the data into a temporary
6992 of the appropriate type, but with the mode corresponding to the
6993 mode for the data we have (op0's mode). It's tempting to make
6994 this a constant type, since we know it's only being stored once,
6995 but that can cause problems if we are taking the address of this
6996 COMPONENT_REF because the MEM of any reference via that address
6997 will have flags corresponding to the type, which will not
6998 necessarily be constant. */
6999 if (mode
== BLKmode
)
7002 = assign_stack_temp_for_type
7003 (ext_mode
, GET_MODE_BITSIZE (ext_mode
), 0, type
);
7005 emit_move_insn (new, op0
);
7006 op0
= copy_rtx (new);
7007 PUT_MODE (op0
, BLKmode
);
7008 set_mem_attributes (op0
, exp
, 1);
7014 /* If the result is BLKmode, use that to access the object
7016 if (mode
== BLKmode
)
7019 /* Get a reference to just this component. */
7020 if (modifier
== EXPAND_CONST_ADDRESS
7021 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7022 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7024 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7026 if (op0
== orig_op0
)
7027 op0
= copy_rtx (op0
);
7029 set_mem_attributes (op0
, exp
, 0);
7030 if (REG_P (XEXP (op0
, 0)))
7031 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7033 MEM_VOLATILE_P (op0
) |= volatilep
;
7034 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7035 || modifier
== EXPAND_CONST_ADDRESS
7036 || modifier
== EXPAND_INITIALIZER
)
7038 else if (target
== 0)
7039 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7041 convert_move (target
, op0
, unsignedp
);
7046 return expand_expr (OBJ_TYPE_REF_EXPR (exp
), target
, tmode
, modifier
);
7049 /* Check for a built-in function. */
7050 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7051 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7053 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7055 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7056 == BUILT_IN_FRONTEND
)
7057 return lang_hooks
.expand_expr (exp
, original_target
,
7061 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7064 return expand_call (exp
, target
, ignore
);
7066 case NON_LVALUE_EXPR
:
7069 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7072 if (TREE_CODE (type
) == UNION_TYPE
)
7074 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7076 /* If both input and output are BLKmode, this conversion isn't doing
7077 anything except possibly changing memory attribute. */
7078 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7080 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7083 result
= copy_rtx (result
);
7084 set_mem_attributes (result
, exp
, 0);
7090 if (TYPE_MODE (type
) != BLKmode
)
7091 target
= gen_reg_rtx (TYPE_MODE (type
));
7093 target
= assign_temp (type
, 0, 1, 1);
7097 /* Store data into beginning of memory target. */
7098 store_expr (TREE_OPERAND (exp
, 0),
7099 adjust_address (target
, TYPE_MODE (valtype
), 0),
7100 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7102 else if (REG_P (target
))
7103 /* Store this field into a union of the proper type. */
7104 store_field (target
,
7105 MIN ((int_size_in_bytes (TREE_TYPE
7106 (TREE_OPERAND (exp
, 0)))
7108 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7109 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7110 VOIDmode
, 0, type
, 0);
7114 /* Return the entire union. */
7118 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7120 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7123 /* If the signedness of the conversion differs and OP0 is
7124 a promoted SUBREG, clear that indication since we now
7125 have to do the proper extension. */
7126 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7127 && GET_CODE (op0
) == SUBREG
)
7128 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7130 return REDUCE_BIT_FIELD (op0
);
7133 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7134 op0
= REDUCE_BIT_FIELD (op0
);
7135 if (GET_MODE (op0
) == mode
)
7138 /* If OP0 is a constant, just convert it into the proper mode. */
7139 if (CONSTANT_P (op0
))
7141 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7142 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7144 if (modifier
== EXPAND_INITIALIZER
)
7145 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7146 subreg_lowpart_offset (mode
,
7149 return convert_modes (mode
, inner_mode
, op0
,
7150 TYPE_UNSIGNED (inner_type
));
7153 if (modifier
== EXPAND_INITIALIZER
)
7154 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7158 convert_to_mode (mode
, op0
,
7159 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7161 convert_move (target
, op0
,
7162 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7165 case VIEW_CONVERT_EXPR
:
7166 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7168 /* If the input and output modes are both the same, we are done.
7169 Otherwise, if neither mode is BLKmode and both are integral and within
7170 a word, we can use gen_lowpart. If neither is true, make sure the
7171 operand is in memory and convert the MEM to the new mode. */
7172 if (TYPE_MODE (type
) == GET_MODE (op0
))
7174 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7175 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7176 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7177 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7178 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7179 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7180 else if (!MEM_P (op0
))
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
7186 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7188 if (TREE_ADDRESSABLE (exp
))
7191 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7193 = assign_stack_temp_for_type
7194 (TYPE_MODE (inner_type
),
7195 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7197 emit_move_insn (target
, op0
);
7201 /* At this point, OP0 is in the correct mode. If the output type is such
7202 that the operand is known to be aligned, indicate that it is.
7203 Otherwise, we need only be concerned about alignment for non-BLKmode
7207 op0
= copy_rtx (op0
);
7209 if (TYPE_ALIGN_OK (type
))
7210 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7211 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7212 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7214 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7215 HOST_WIDE_INT temp_size
7216 = MAX (int_size_in_bytes (inner_type
),
7217 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7218 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7219 temp_size
, 0, type
);
7220 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7222 if (TREE_ADDRESSABLE (exp
))
7225 if (GET_MODE (op0
) == BLKmode
)
7226 emit_block_move (new_with_op0_mode
, op0
,
7227 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7228 (modifier
== EXPAND_STACK_PARM
7229 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7231 emit_move_insn (new_with_op0_mode
, op0
);
7236 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7242 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7243 something else, make sure we add the register to the constant and
7244 then to the other thing. This case can occur during strength
7245 reduction and doing it this way will produce better code if the
7246 frame pointer or argument pointer is eliminated.
7248 fold-const.c will ensure that the constant is always in the inner
7249 PLUS_EXPR, so the only case we need to do anything about is if
7250 sp, ap, or fp is our second argument, in which case we must swap
7251 the innermost first argument and our second argument. */
7253 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7254 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7255 && TREE_CODE (TREE_OPERAND (exp
, 1)) == VAR_DECL
7256 && (DECL_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7257 || DECL_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7258 || DECL_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7260 tree t
= TREE_OPERAND (exp
, 1);
7262 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7263 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7266 /* If the result is to be ptr_mode and we are adding an integer to
7267 something, we might be forming a constant. So try to use
7268 plus_constant. If it produces a sum and we can't accept it,
7269 use force_operand. This allows P = &ARR[const] to generate
7270 efficient code on machines where a SYMBOL_REF is not a valid
7273 If this is an EXPAND_SUM call, always return the sum. */
7274 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7275 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7277 if (modifier
== EXPAND_STACK_PARM
)
7279 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7280 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7281 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7285 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7287 /* Use immed_double_const to ensure that the constant is
7288 truncated according to the mode of OP1, then sign extended
7289 to a HOST_WIDE_INT. Using the constant directly can result
7290 in non-canonical RTL in a 64x32 cross compile. */
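
	    /* Concrete case (illustrative): on a 64-bit host compiling for
	       a 32-bit target, a tree constant whose low part is 0x80000000
	       used directly would give CONST_INT 0x0000000080000000, which
	       is not the canonical sign-extended form for SImode;
	       immed_double_const instead yields the properly sign-extended
	       value (HOST_WIDE_INT) -0x80000000.  */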
7292 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7294 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7295 op1
= plus_constant (op1
, INTVAL (constant_part
));
7296 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7297 op1
= force_operand (op1
, target
);
7298 return REDUCE_BIT_FIELD (op1
);
7301 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7302 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7303 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7307 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7308 (modifier
== EXPAND_INITIALIZER
7309 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7310 if (! CONSTANT_P (op0
))
7312 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7313 VOIDmode
, modifier
);
7314 /* Return a PLUS if modifier says it's OK. */
7315 if (modifier
== EXPAND_SUM
7316 || modifier
== EXPAND_INITIALIZER
)
7317 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7320 /* Use immed_double_const to ensure that the constant is
7321 truncated according to the mode of OP1, then sign extended
7322 to a HOST_WIDE_INT. Using the constant directly can result
7323 in non-canonical RTL in a 64x32 cross compile. */
7325 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7327 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7328 op0
= plus_constant (op0
, INTVAL (constant_part
));
7329 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7330 op0
= force_operand (op0
, target
);
7331 return REDUCE_BIT_FIELD (op0
);
7335 /* No sense saving up arithmetic to be done
7336 if it's all in the wrong mode to form part of an address.
7337 And force_operand won't know whether to sign-extend or
7339 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7340 || mode
!= ptr_mode
)
7342 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7343 subtarget
, &op0
, &op1
, 0);
7344 if (op0
== const0_rtx
)
7346 if (op1
== const0_rtx
)
7351 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7352 subtarget
, &op0
, &op1
, modifier
);
7353 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
7356 /* For initializers, we are allowed to return a MINUS of two
7357 symbolic constants. Here we handle all cases when both operands
7359 /* Handle difference of two symbolic constants,
7360 for the sake of an initializer. */
7361 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7362 && really_constant_p (TREE_OPERAND (exp
, 0))
7363 && really_constant_p (TREE_OPERAND (exp
, 1)))
7365 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7366 NULL_RTX
, &op0
, &op1
, modifier
);
7368 /* If the last operand is a CONST_INT, use plus_constant of
7369 the negated constant. Else make the MINUS. */
7370 if (GET_CODE (op1
) == CONST_INT
)
7371 return REDUCE_BIT_FIELD (plus_constant (op0
, - INTVAL (op1
)));
7373 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode
, op0
, op1
));
7376 /* No sense saving up arithmetic to be done
7377 if it's all in the wrong mode to form part of an address.
7378 And force_operand won't know whether to sign-extend or
7380 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7381 || mode
!= ptr_mode
)
7384 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7385 subtarget
, &op0
, &op1
, modifier
);
7387 /* Convert A - const to A + (-const). */
7388 if (GET_CODE (op1
) == CONST_INT
)
7390 op1
= negate_rtx (mode
, op1
);
7391 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
7397 /* If first operand is constant, swap them.
7398 Thus the following special case checks need only
7399 check the second operand. */
7400 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7402 tree t1
= TREE_OPERAND (exp
, 0);
7403 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7404 TREE_OPERAND (exp
, 1) = t1
;
7407 /* Attempt to return something suitable for generating an
7408 indexed address, for machines that support that. */
7410 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7411 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7413 tree exp1
= TREE_OPERAND (exp
, 1);
7415 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7419 op0
= force_operand (op0
, NULL_RTX
);
7421 op0
= copy_to_mode_reg (mode
, op0
);
7423 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
7424 gen_int_mode (tree_low_cst (exp1
, 0),
7425 TYPE_MODE (TREE_TYPE (exp1
)))));
7428 if (modifier
== EXPAND_STACK_PARM
)
7431 /* Check for multiplying things that have been extended
7432 from a narrower type. If this machine supports multiplying
7433 in that narrower type with a result in the desired type,
7434 do it that way, and avoid the explicit type-conversion. */
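
	/* For instance (illustrative only): with 32-bit ints and a target
	   that provides a widening 32x32->64 multiply pattern, the source
	   expression (long long) a * (long long) b is expanded through
	   smul_widen_optab / umul_widen_optab below rather than by first
	   converting both operands to the wider mode.  */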
7435 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7436 && TREE_CODE (type
) == INTEGER_TYPE
7437 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7438 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7439 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7440 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7441 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7442 /* Don't use a widening multiply if a shift will do. */
7443 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7444 > HOST_BITS_PER_WIDE_INT
)
7445 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7447 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7448 && (TYPE_PRECISION (TREE_TYPE
7449 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7450 == TYPE_PRECISION (TREE_TYPE
7452 (TREE_OPERAND (exp
, 0), 0))))
7453 /* If both operands are extended, they must either both
7454 be zero-extended or both be sign-extended. */
7455 && (TYPE_UNSIGNED (TREE_TYPE
7456 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7457 == TYPE_UNSIGNED (TREE_TYPE
7459 (TREE_OPERAND (exp
, 0), 0)))))))
7461 tree op0type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0));
7462 enum machine_mode innermode
= TYPE_MODE (op0type
);
7463 bool zextend_p
= TYPE_UNSIGNED (op0type
);
7464 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
7465 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
7467 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7469 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7471 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7472 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7473 TREE_OPERAND (exp
, 1),
7474 NULL_RTX
, &op0
, &op1
, 0);
7476 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7477 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7478 NULL_RTX
, &op0
, &op1
, 0);
7481 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7482 && innermode
== word_mode
)
7485 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7486 NULL_RTX
, VOIDmode
, 0);
7487 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7488 op1
= convert_modes (innermode
, mode
,
7489 expand_expr (TREE_OPERAND (exp
, 1),
7490 NULL_RTX
, VOIDmode
, 0),
7493 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7494 NULL_RTX
, VOIDmode
, 0);
7495 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7496 unsignedp
, OPTAB_LIB_WIDEN
);
7497 hipart
= gen_highpart (innermode
, temp
);
7498 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
7502 emit_move_insn (hipart
, htem
);
7503 return REDUCE_BIT_FIELD (temp
);
7507 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7508 subtarget
, &op0
, &op1
, 0);
7509 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
7511 case TRUNC_DIV_EXPR
:
7512 case FLOOR_DIV_EXPR
:
7514 case ROUND_DIV_EXPR
:
7515 case EXACT_DIV_EXPR
:
7516 if (modifier
== EXPAND_STACK_PARM
)
7518 /* Possible optimization: compute the dividend with EXPAND_SUM
7519 then if the divisor is constant can optimize the case
7520 where some terms of the dividend have coeffs divisible by it. */
7521 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7522 subtarget
, &op0
, &op1
, 0);
7523 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the
	 original computation.  */
7529 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
7530 && TREE_CODE (type
) == REAL_TYPE
7531 && !real_onep (TREE_OPERAND (exp
, 0)))
7532 return expand_expr (build2 (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
7533 build2 (RDIV_EXPR
, type
,
7534 build_real (type
, dconst1
),
7535 TREE_OPERAND (exp
, 1))),
7536 target
, tmode
, modifier
);
7540 case TRUNC_MOD_EXPR
:
7541 case FLOOR_MOD_EXPR
:
7543 case ROUND_MOD_EXPR
:
7544 if (modifier
== EXPAND_STACK_PARM
)
7546 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7547 subtarget
, &op0
, &op1
, 0);
7548 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7550 case FIX_ROUND_EXPR
:
7551 case FIX_FLOOR_EXPR
:
7553 abort (); /* Not used for C. */
7555 case FIX_TRUNC_EXPR
:
7556 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7557 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7558 target
= gen_reg_rtx (mode
);
7559 expand_fix (target
, op0
, unsignedp
);
7563 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7564 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7565 target
= gen_reg_rtx (mode
);
7566 /* expand_float can't figure out what to do if FROM has VOIDmode.
7567 So give it the correct mode. With -O, cse will optimize this. */
7568 if (GET_MODE (op0
) == VOIDmode
)
7569 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7571 expand_float (target
, op0
,
7572 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7576 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7577 if (modifier
== EXPAND_STACK_PARM
)
7579 temp
= expand_unop (mode
,
7580 optab_for_tree_code (NEGATE_EXPR
, type
),
7584 return REDUCE_BIT_FIELD (temp
);
7587 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7588 if (modifier
== EXPAND_STACK_PARM
)
7591 /* ABS_EXPR is not valid for complex arguments. */
7592 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
7593 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
7596 /* Unsigned abs is simply the operand. Testing here means we don't
7597 risk generating incorrect code below. */
7598 if (TYPE_UNSIGNED (type
))
7601 return expand_abs (mode
, op0
, target
, unsignedp
,
7602 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
7606 target
= original_target
;
7608 || modifier
== EXPAND_STACK_PARM
7609 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
7610 || GET_MODE (target
) != mode
7612 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
7613 target
= gen_reg_rtx (mode
);
7614 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7615 target
, &op0
, &op1
, 0);
7617 /* First try to do it with a special MIN or MAX instruction.
7618 If that does not win, use a conditional jump to select the proper
7620 this_optab
= optab_for_tree_code (code
, type
);
7621 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
      /* At this point, a MEM target is no longer useful; we will get better
	 code without one.  */
7630 target
= gen_reg_rtx (mode
);
7632 /* If op1 was placed in target, swap op0 and op1. */
7633 if (target
!= op0
&& target
== op1
)
7641 emit_move_insn (target
, op0
);
7643 op0
= gen_label_rtx ();
7645 /* If this mode is an integer too wide to compare properly,
7646 compare word by word. Rely on cse to optimize constant cases. */
7647 if (GET_MODE_CLASS (mode
) == MODE_INT
7648 && ! can_compare_p (GE
, mode
, ccp_jump
))
7650 if (code
== MAX_EXPR
)
7651 do_jump_by_parts_greater_rtx (mode
, unsignedp
, target
, op1
,
7654 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op1
, target
,
7659 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
7660 unsignedp
, mode
, NULL_RTX
, NULL_RTX
, op0
);
7662 emit_move_insn (target
, op1
);
7667 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7668 if (modifier
== EXPAND_STACK_PARM
)
7670 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
7675 /* ??? Can optimize bitwise operations with one arg constant.
7676 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7677 and (a bitwise1 b) bitwise2 b (etc)
7678 but that is probably not worth while. */
7680 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7681 boolean values when we want in all cases to compute both of them. In
7682 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7683 as actual zero-or-1 values and then bitwise anding. In cases where
7684 there cannot be any side effects, better code would be made by
7685 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7686 how to recognize those cases. */
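
      /* Example (illustrative): for (a < b) & (c < d) written as a
	 TRUTH_AND_EXPR, both comparisons are expanded to 0-or-1 values
	 and combined with a bitwise AND; a TRUTH_ANDIF_EXPR would
	 instead branch around the evaluation of the second comparison.  */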
7688 case TRUTH_AND_EXPR
:
7689 code
= BIT_AND_EXPR
;
7694 code
= BIT_IOR_EXPR
;
7698 case TRUTH_XOR_EXPR
:
7699 code
= BIT_XOR_EXPR
;
7707 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7709 if (modifier
== EXPAND_STACK_PARM
)
7711 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7712 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
7715 /* Could determine the answer when only additive constants differ. Also,
7716 the addition of one can be handled by changing the condition. */
7723 case UNORDERED_EXPR
:
7731 temp
= do_store_flag (exp
,
7732 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
7733 tmode
!= VOIDmode
? tmode
: mode
, 0);
7737 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7738 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
7740 && REG_P (original_target
)
7741 && (GET_MODE (original_target
)
7742 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7744 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
7747 /* If temp is constant, we can just compute the result. */
7748 if (GET_CODE (temp
) == CONST_INT
)
7750 if (INTVAL (temp
) != 0)
7751 emit_move_insn (target
, const1_rtx
);
7753 emit_move_insn (target
, const0_rtx
);
7758 if (temp
!= original_target
)
7760 enum machine_mode mode1
= GET_MODE (temp
);
7761 if (mode1
== VOIDmode
)
7762 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
7764 temp
= copy_to_mode_reg (mode1
, temp
);
7767 op1
= gen_label_rtx ();
7768 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
7769 GET_MODE (temp
), unsignedp
, op1
);
7770 emit_move_insn (temp
, const1_rtx
);
7775 /* If no set-flag instruction, must generate a conditional store
7776 into a temporary variable. Drop through and handle this
7781 || modifier
== EXPAND_STACK_PARM
7782 || ! safe_from_p (target
, exp
, 1)
7783 /* Make sure we don't have a hard reg (such as function's return
7784 value) live across basic blocks, if not optimizing. */
7785 || (!optimize
&& REG_P (target
)
7786 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
7787 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7790 emit_move_insn (target
, const0_rtx
);
7792 op1
= gen_label_rtx ();
7793 jumpifnot (exp
, op1
);
7796 emit_move_insn (target
, const1_rtx
);
7799 return ignore
? const0_rtx
: target
;
7801 case TRUTH_NOT_EXPR
:
7802 if (modifier
== EXPAND_STACK_PARM
)
7804 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7805 /* The parser is careful to generate TRUTH_NOT_EXPR
7806 only with operands that are always zero or one. */
7807 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
7808 target
, 1, OPTAB_LIB_WIDEN
);
7813 case STATEMENT_LIST
:
7815 tree_stmt_iterator iter
;
7820 for (iter
= tsi_start (exp
); !tsi_end_p (iter
); tsi_next (&iter
))
7821 expand_expr (tsi_stmt (iter
), const0_rtx
, VOIDmode
, modifier
);
7826 /* If it's void, we don't need to worry about computing a value. */
7827 if (VOID_TYPE_P (TREE_TYPE (exp
)))
7829 tree pred
= TREE_OPERAND (exp
, 0);
7830 tree then_
= TREE_OPERAND (exp
, 1);
7831 tree else_
= TREE_OPERAND (exp
, 2);
7833 if (TREE_CODE (then_
) != GOTO_EXPR
7834 || TREE_CODE (GOTO_DESTINATION (then_
)) != LABEL_DECL
7835 || TREE_CODE (else_
) != GOTO_EXPR
7836 || TREE_CODE (GOTO_DESTINATION (else_
)) != LABEL_DECL
)
7839 jumpif (pred
, label_rtx (GOTO_DESTINATION (then_
)));
7840 return expand_expr (else_
, const0_rtx
, VOIDmode
, 0);
7843 /* Note that COND_EXPRs whose type is a structure or union
7844 are required to be constructed to contain assignments of
7845 a temporary variable, so that we can evaluate them here
7846 for side effect only. If type is void, we must do likewise. */
7848 if (TREE_ADDRESSABLE (type
)
7850 || TREE_TYPE (TREE_OPERAND (exp
, 1)) == void_type_node
7851 || TREE_TYPE (TREE_OPERAND (exp
, 2)) == void_type_node
)
7854 /* If we are not to produce a result, we have no target. Otherwise,
7855 if a target was specified use it; it will not be used as an
7856 intermediate target unless it is safe. If no target, use a
7859 if (modifier
!= EXPAND_STACK_PARM
7861 && safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
7862 && GET_MODE (original_target
) == mode
7863 #ifdef HAVE_conditional_move
7864 && (! can_conditionally_move_p (mode
)
7865 || REG_P (original_target
))
7867 && !MEM_P (original_target
))
7868 temp
= original_target
;
7870 temp
= assign_temp (type
, 0, 0, 1);
7872 do_pending_stack_adjust ();
7874 op0
= gen_label_rtx ();
7875 op1
= gen_label_rtx ();
7876 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7877 store_expr (TREE_OPERAND (exp
, 1), temp
,
7878 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7880 emit_jump_insn (gen_jump (op1
));
7883 store_expr (TREE_OPERAND (exp
, 2), temp
,
7884 modifier
== EXPAND_STACK_PARM
? 2 : 0);
    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy and that prevents a partial-subsumption
	   which makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
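	/* For example, with two one-bit bitfields the statement
	     a.x |= b.y;
	   whose value is not needed can be expanded as
	     if (b.y) a.x = 1;
	   avoiding a read-modify-write of a.x.  The test below checks
	   for exactly that shape.  */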
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	temp = expand_assignment (lhs, rhs, ! ignore);

	return temp;
      }
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      /* If we are taking the address of a constant and are at the
	 top level, we have to use output_constant_def since we can't
	 call force_const_mem at top level.  */
      else if (cfun == 0
	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
		       == 'c')))
	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));
	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (REG_P (op0) || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
		   || GET_CODE (op0) == LO_SUM)
	    {
	      /* If this object is in a register, it can't be BLKmode.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx memloc = assign_temp (inner_type, 1, 1, 1);

	      if (GET_CODE (op0) == PARALLEL)
		/* Handle calls that pass values in multiple
		   non-contiguous locations.  The Irix 6 ABI has examples
		   of this.  */
		emit_group_store (memloc, op0, inner_type,
				  int_size_in_bytes (inner_type));
	      else
		emit_move_insn (memloc, op0);

	      op0 = memloc;
	    }

	  if (!MEM_P (op0))
	    abort ();

	  mark_temp_addr_taken (op0);
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      op0 = XEXP (op0, 0);
	      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
		op0 = convert_memory_address (ptr_mode, op0);
	      return op0;
	    }
	  /* If OP0 is not aligned as least as much as the type requires, we
	     need to make a temporary, copy OP0 to it, and take the address of
	     the temporary.  We want to use the alignment of the type, not of
	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	     the test for BLKmode means that can't happen.  The test for
	     BLKmode is because we never make mis-aligned MEMs with
	     non-BLKmode.

	     We don't need to do this at all if the machine doesn't have
	     strict alignment.  */
	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  > MEM_ALIGN (op0))
	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx new;

	      if (TYPE_ALIGN_OK (inner_type))
		abort ();

	      if (TREE_ADDRESSABLE (inner_type))
		{
		  /* We can't make a bitwise copy of this object, so fail.  */
		  error ("cannot take the address of an unaligned member");
		  return const0_rtx;
		}

	      new = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		 : int_size_in_bytes (inner_type),
		 1, build_qualified_type (inner_type,
					  (TYPE_QUALS (inner_type)
					   | TYPE_QUAL_CONST)));

	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
			       (modifier == EXPAND_STACK_PARM
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      op0 = new;
	    }
	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr
	  && !REG_P (op0)
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	op0 = force_reg (Pmode, op0);

      if (REG_P (op0)
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);

      return op0;
      /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }
    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      abort ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case CASE_LABEL_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LABELED_BLOCK_EXPR:
    case EXIT_BLOCK_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      abort ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      abort ();

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);

  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
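/* For example, reducing to a 3-bit unsigned type masks the value with
   (1 << 3) - 1, while reducing to a 3-bit signed type shifts it left by
   GET_MODE_BITSIZE (mode) - 3 and then arithmetic-shifts it right by the
   same amount, so the result is sign-extended from bit 2.  */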
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
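/* For example, an OFFSET of the form (-&EXP) & 63, i.e. a BIT_AND_EXPR
   of a NEGATE_EXPR of EXP's ADDR_EXPR with the constant 63, passes this
   test when 63 is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding
   such an offset to EXP's address rounds it up to a 64-byte boundary.  */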
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
      return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
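/* For example, given the argument "abc" + 2, i.e. a PLUS_EXPR of the
   ADDR_EXPR of the STRING_CST "abc" and the constant 2, this returns
   the STRING_CST and sets *PTR_OFFSET to 2, letting callers fold
   accesses into the string at compile time.  */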
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
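  /* For example, (x & 8) != 0 becomes, in effect, (x >> 3) & 1, and
     (x & 8) == 0 becomes ((x >> 3) & 1) ^ 1.  */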
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
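/* The fallback above emits, for a non-inverted comparison,
     target = 1;  if (op0 <cond> op1) goto label;  target = 0;  label:
   which is the set/jump/set sequence mentioned in the comment before
   do_store_flag.  */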
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
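/* The casesi pattern's operands are, in order: the index value, the
   lower bound, the range (upper bound minus lower bound), the table
   label and the default label; the required operand modes and
   predicates are taken from insn_data as above.  */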
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
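  /* For example, for a switch whose cases run from 5 to 10, INDEX holds
     the original value minus 5 and RANGE is 5, so the single unsigned
     comparison INDEX > 5 rejects both values below 5 and values above
     10 in one branch.  */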
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
			     convert (index_type, index_expr),
			     convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"