/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
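
/* Illustrative only (not part of the original file): a target header
   could override the default heuristic with a flat size threshold, e.g.

     #define MOVE_BY_PIECES_P(SIZE, ALIGN) ((SIZE) < 32)

   The 32-byte cutoff is a hypothetical example; real targets normally
   tune MOVE_RATIO instead and keep the ninsns-based definition above.  */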
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;

      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
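
/* A minimal usage sketch (illustrative, not part of the original file):
   an expander reading two operands that may contain QUEUED rtxs would do

     op0 = protect_from_queue (op0, 0);    <- read access
     op1 = protect_from_queue (op1, 0);
     emit_insn (gen_move_insn (target, gen_rtx_PLUS (mode, op0, op1)));
     emit_queue ();                        <- flush pending increments

   OP0, OP1, TARGET and MODE are placeholder names.  */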
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Retrieve a mark on the queue.  */

rtx
mark_queue (void)
{
  return pending_chain;
}
/* Perform all the pending incrementations that have been enqueued
   after MARK was retrieved.  If MARK is null, perform all the
   pending incrementations.  */

void
emit_insns_enqueued_after_mark (rtx mark)
{
  rtx p;

  /* The marked incrementation may have been emitted in the meantime
     through a call to emit_queue.  In this case, the mark is not valid
     anymore so do nothing.  */
  if (mark && ! QUEUED_BODY (mark))
    return;

  while ((p = pending_chain) != mark)
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  QUEUED_INSN (p) = body;
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  /* QUEUED_BODY should never be a SEQUENCE.  */
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      QUEUED_BODY (p) = 0;
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  emit_insns_enqueued_after_mark (NULL_RTX);
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;
      else
	abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
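
/* Usage sketch (illustrative, not part of the original file): widening a
   QImode value SRC into a fresh SImode register, treating SRC as unsigned:

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);

   Depending on which of the cases above applies, this emits a single
   zero-extend insn, a conversion through word_mode, or an explicit
   shift sequence.  SRC is a placeholder name.  */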
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
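
  /* Worked example (illustrative numbers): with 32-bit HOST_WIDE_INT,
     converting (const_int -1) to an unsigned 64-bit mode must produce
     0x00000000FFFFFFFF, whereas gen_lowpart would deliver the
     sign-extended pattern 0xFFFFFFFFFFFFFFFF.  */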
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to an
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
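
/* Worked example (illustrative, assuming a 32-bit target with MOVE_MAX == 4
   and word-aligned operands): L == 11 decomposes as two SImode moves
   (8 bytes), one HImode move (2 bytes) and one QImode move (1 byte),
   so the function returns 4.  */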
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (!MEM_P (x))
    abort ();
  if (!MEM_P (y))
    abort ();
  if (size == 0)
    abort ();

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
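
/* Usage sketch (illustrative, not part of the original file): copying a
   16-byte aggregate between two BLKmode MEMs, letting the expander pick
   among move_by_pieces, a movstr pattern, or a memcpy call:

     emit_block_move (dst_mem, src_mem, GEN_INT (16), BLOCK_OP_NORMAL);

   DST_MEM and SRC_MEM are placeholder BLKmode MEM rtxs.  */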
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
#endif
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
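
/* For illustration (hypothetical 64-bit target, not from this file):
   a two-register group covering a 16-byte block might look like

     (parallel [(expr_list (reg:DI 100) (const_int 0))
		(expr_list (reg:DI 101) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset within
   the block.  */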
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else if (bytepos == 0 && XVECLEN (src, 0))
	    {
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (!REG_P (reg)
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      if (l != 0)
	abort ();
    }

  return 1;
}
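/* Illustrative callback (hypothetical, added for this sketch; not in
   the original file): the shape a CONSTFUN argument is expected to
   have.  It returns a MODE-wide constant consisting of the byte 0x41
   replicated, and assumes GET_MODE_SIZE (mode) fits in a
   HOST_WIDE_INT.  */
#if 0
static rtx
repeat_byte_constfun (void *data ATTRIBUTE_UNUSED,
		      HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		      enum machine_mode mode)
{
  HOST_WIDE_INT c = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    c = (c << 8) | 0x41;
  return gen_int_mode (c, mode);
}
#endif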
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      if (endp == 2)
	abort ();
      return to;
    }

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
		   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size)
{
  rtx retval = 0;
  unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
			: GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (size == const0_rtx)
	;
      else if (GET_CODE (size) == CONST_INT
	       && CLEAR_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
	;
      else
	retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than
	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
	     the mode mask, as it is returned by the macro, it will
	     definitely be less than the actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (object, BLKmode))
	  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op1;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op1 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[1].predicate;
	  if (pred != 0 && ! (*pred) (op1, mode))
	    op1 = copy_to_mode_reg (mode, op1);

	  pat = GEN_FCN ((int) code) (object, op1, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  return false;
}
/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memset");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   integer_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bzero");
	  args = build_function_type_list (void_type_node, ptr_type_node,
					   unsigned_type_node, NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_clear_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
    }
}
static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn, NULL);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (MEM_P (y)
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = GET_MODE_INNER (mode))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
	 machine can push exactly, we need to use move instructions.  */
      if (stack
	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
	      != GET_MODE_SIZE (submode)))
	{
	  rtx temp;
	  HOST_WIDE_INT offset1, offset2;

	  /* Do not use anti_adjust_stack, since we don't want to update
	     stack_pointer_delta.  */
	  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT
				 (PUSH_ROUNDING
				  (GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

	  if (temp != stack_pointer_rtx)
	    emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
	  offset1 = 0;
	  offset2 = GET_MODE_SIZE (submode);
#else
	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
		     + GET_MODE_SIZE (submode));
#endif

	  emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset1))),
			  gen_realpart (submode, y));
	  emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset2))),
			  gen_imagpart (submode, y));
	}
      else
#endif
      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			  gen_imagpart (submode, y));
	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			  gen_realpart (submode, y));
#else
	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			  gen_realpart (submode, y));
	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			  gen_imagpart (submode, y));
#endif
	}
      else
	{
	  rtx realpart_x, realpart_y;
	  rtx imagpart_x, imagpart_y;

	  /* If this is a complex value with each part being smaller than a
	     word, the usual calling sequence will likely pack the pieces into
	     a single register.  Unfortunately, SUBREG of hard registers only
	     deals in terms of words, so we have a problem converting input
	     arguments to the CONCAT of two registers that is used elsewhere
	     for complex values.  If this is before reload, we can copy it into
	     memory and reload.  FIXME, we should see about using extract and
	     insert on integer registers, but complex short and complex char
	     variables should be rarely used.  */
	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
	      && (reload_in_progress | reload_completed) == 0)
	    {
	      int packed_dest_p
		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
	      int packed_src_p
		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

	      if (packed_dest_p || packed_src_p)
		{
		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
					       ? MODE_FLOAT : MODE_INT);

		  enum machine_mode reg_mode
		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

		  if (reg_mode != BLKmode)
		    {
		      rtx mem = assign_stack_temp (reg_mode,
						   GET_MODE_SIZE (mode), 0);
		      rtx cmem = adjust_address (mem, mode, 0);

		      if (packed_dest_p)
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

			  emit_move_insn_1 (cmem, y);
			  return emit_move_insn_1 (sreg, mem);
			}
		      else
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

			  emit_move_insn_1 (mem, sreg);
			  return emit_move_insn_1 (x, cmem);
			}
		    }
		}
	    }

	  realpart_x = gen_realpart (submode, x);
	  realpart_y = gen_realpart (submode, y);
	  imagpart_x = gen_imagpart (submode, x);
	  imagpart_y = gen_imagpart (submode, y);

	  /* Show the output dies here.  This is necessary for SUBREGs
	     of pseudos since we cannot track their lifetimes correctly;
	     hard regs shouldn't appear here except as return values.
	     We never want to emit such a clobber after reload.  */
	  if (x != y
	      && ! (reload_in_progress || reload_completed)
	      && (GET_CODE (realpart_x) == SUBREG
		  || GET_CODE (imagpart_x) == SUBREG))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

	  emit_move_insn (realpart_x, realpart_y);
	  emit_move_insn (imagpart_x, imagpart_y);
	}

      return get_last_insn ();
    }

  /* Handle MODE_CC modes:  If we don't have a special move insn for this mode,
     find a mode to do it in.  If we have a movcc, use it.  Otherwise,
     find the MODE_INT mode of the same width.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
	   && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    {
      enum insn_code insn_code;
      enum machine_mode tmode = VOIDmode;
      rtx x1 = x, y1 = y;

      if (mode != CCmode
	  && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
	tmode = CCmode;
      else
	for (tmode = QImode; tmode != VOIDmode;
	     tmode = GET_MODE_WIDER_MODE (tmode))
	  if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
	    break;

      if (tmode == VOIDmode)
	abort ();

      /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
	 may call change_address which is not appropriate if we were
	 called when a reload was in progress.  We don't have to worry
	 about changing the address since the size in bytes is supposed to
	 be the same.  Copy the MEM to change the mode and move any
	 substitutions from the old MEM to the new one.  */

      if (reload_in_progress)
	{
	  x = gen_lowpart_common (tmode, x1);
	  if (x == 0 && MEM_P (x1))
	    {
	      x = adjust_address_nv (x1, tmode, 0);
	      copy_replacements (x1, x);
	    }

	  y = gen_lowpart_common (tmode, y1);
	  if (y == 0 && MEM_P (y1))
	    {
	      y = adjust_address_nv (y1, tmode, 0);
	      copy_replacements (y1, y);
	    }
	}
      else
	{
	  x = gen_lowpart (tmode, x);
	  y = gen_lowpart (tmode, y);
	}

      insn_code = mov_optab->handlers[(int) tmode].insn_code;
      return emit_insn (GEN_FCN (insn_code) (x, y));
    }

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	   && (submode = int_mode_for_mode (mode)) != BLKmode
	   && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
		      (simplify_gen_subreg (submode, x, mode, 0),
		       simplify_gen_subreg (submode, y, mode, 0)));

  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  rtx temp;
	  enum rtx_code code;

	  /* Do not use anti_adjust_stack, since we don't want to update
	     stack_pointer_delta.  */
	  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT
				 (PUSH_ROUNDING
				  (GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

	  if (temp != stack_pointer_rtx)
	    emit_move_insn (stack_pointer_rtx, temp);

	  code = GET_CODE (XEXP (x, 0));

	  /* Just hope that small offsets off SP are OK.  */
	  if (code == POST_INC)
	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				 GEN_INT (-((HOST_WIDE_INT)
					    GET_MODE_SIZE (GET_MODE (x)))));
	  else if (code == POST_DEC)
	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  else
	    temp = stack_pointer_rtx;

	  x = change_address (x, VOIDmode, temp);
	}
#endif

      /* If we are in reload, see if either operand is a MEM whose address
	 is scheduled for replacement.  */
      if (reload_in_progress && MEM_P (x)
	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
	x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && MEM_P (y)
	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
	y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  need_clobber |= (GET_CODE (xpart) == SUBREG);

	  last_insn = emit_move_insn (xpart, ypart);
	}

      seq = get_insns ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
	 of pseudos since we cannot track their lifetimes correctly;
	 hard regs shouldn't appear here except as return values.
	 We never want to emit such a clobber after reload.  */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
	  && need_clobber != 0)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
	continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation at the move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset;
      int skip;

      if (reg && GET_CODE (reg) == PARALLEL)
	{
	  /* Use the size of the elt to compute offset.  */
	  rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
	  used = partial * GET_MODE_SIZE (GET_MODE (elt));
	  offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
	}
      else
	offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  if (type != 0)
	    {
	      set_mem_attributes (target, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      set_mem_alias_set (target, 0);
	    }

	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);
	  if (type != 0)
	    {
	      set_mem_attributes (dest, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      set_mem_alias_set (dest, 0);
	    }

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || !REG_P (x)
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.  */

rtx
expand_assignment (tree to, tree from, int want_value)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

	  if (!MEM_P (to_rtx))
	    abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (offset_rtx) != Pmode)
	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	  if (GET_MODE (offset_rtx) != ptr_mode)
	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (MEM_P (to_rtx)
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      if (MEM_P (to_rtx))
	{
	  /* If the field is at offset zero, we could have been given the
	     DECL_RTX of the parent struct.  Don't munge it.  */
	  to_rtx = shallow_copy_rtx (to_rtx);

	  set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
	}

      /* Deal with volatile and readonly fields.  The former is only done
	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && MEM_P (to_rtx))
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  MEM_VOLATILE_P (to_rtx) = 1;
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && TREE_READONLY (TREE_OPERAND (to, 1))
	  /* We can't assert that a MEM won't be set more than once
	     if the component is not addressable because another
	     non-addressable component may be referenced by the same MEM.  */
	  && ! (MEM_P (to_rtx) && ! can_address_p (to)))
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  RTX_UNCHANGING_P (to_rtx) = 1;
	}

      if (MEM_P (to_rtx) && ! can_address_p (to))
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	}

      /* Optimize bitfld op= val in certain cases.  */
      while (mode1 == VOIDmode && !want_value
	     && bitpos + bitsize <= BITS_PER_WORD
	     && bitsize < BITS_PER_WORD
	     && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
	     && !TREE_SIDE_EFFECTS (to)
	     && !TREE_THIS_VOLATILE (to))
	{
	  tree src, op0, op1;
	  rtx value;
	  HOST_WIDE_INT count = bitpos;
	  optab binop;

	  src = from;
	  STRIP_NOPS (src);
	  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
	      || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
	    break;

	  op0 = TREE_OPERAND (src, 0);
	  op1 = TREE_OPERAND (src, 1);
	  STRIP_NOPS (op0);

	  if (! operand_equal_p (to, op0, 0))
	    break;

	  if (BYTES_BIG_ENDIAN)
	    count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;

	  /* Special case some bitfield op= exp.  */
	  switch (TREE_CODE (src))
	    {
	    case PLUS_EXPR:
	    case MINUS_EXPR:
	      if (count <= 0)
		break;

	      /* For now, just optimize the case of the topmost bitfield
		 where we don't need to do any masking and also
		 1 bit bitfields where xor can be used.
		 We might win by one instruction for the other bitfields
		 too if insv/extv instructions aren't used, so that
		 can be added later.  */
	      if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))
		  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
		break;
	      value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
	      value = protect_from_queue (value, 0);
	      to_rtx = protect_from_queue (to_rtx, 1);
	      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
	      if (bitsize == 1
		  && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
		{
		  value = expand_and (GET_MODE (to_rtx), value, const1_rtx,
				      NULL_RTX);
		  binop = xor_optab;
		}
	      value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
				    value, build_int_2 (count, 0),
				    NULL_RTX, 1);
	      result = expand_binop (GET_MODE (to_rtx), binop, to_rtx,
				     value, to_rtx, 1, OPTAB_WIDEN);
	      if (result != to_rtx)
		emit_move_insn (to_rtx, result);
	      free_temp_slots ();
	      pop_temp_slots ();
	      return NULL_RTX;

	    default:
	      break;
	    }

	  break;
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast for HPUX compiler.  */
			     ? ((enum machine_mode)
				TYPE_MODE (TREE_TYPE (to)))
			     : VOIDmode),
			    unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TYPE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && REG_P (DECL_RTL (to))))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address (GET_MODE (to_rtx), value);
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      if (TARGET_MEM_FUNCTIONS)
	emit_library_call (memmove_libfunc, LCT_NORMAL,
			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			   XEXP (from_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TYPE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));
      else
	emit_library_call (bcopy_libfunc, LCT_NORMAL,
			   VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			   XEXP (to_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (integer_type_node),
					    size,
					    TYPE_UNSIGNED (integer_type_node)),
			   TYPE_MODE (integer_type_node));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE & 1 is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself,
   because in BLKmode this does not cause a problem: C has no operators
   that could combine two different assignments into the same BLKmode
   object with different values with no sequence point.  Will other
   languages need this to be more thorough?

   If WANT_VALUE & 1 is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.

   If WANT_VALUE & 2 is set, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */
rtx
store_expr (tree exp, rtx target, int want_value)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  rtx mark = mark_queue ();
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value & 1 ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target),
			      (want_value & 2
			       ? EXPAND_STACK_PARM : EXPAND_NORMAL));
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
			    (want_value & 2
			     ? EXPAND_STACK_PARM : EXPAND_NORMAL));

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
	dont_return_target = 1;
    }
  else if ((want_value & 1) != 0
	   && MEM_P (target)
	   && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target),
			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	{
	  /* If TEMP is already in the desired TARGET, only copy it from
	     memory and don't store it there again.  */
	  if (temp == target
	      || (rtx_equal_p (temp, target)
		  && ! side_effects_p (temp) && ! side_effects_p (target)))
	    dont_store_target = 1;
	  temp = copy_to_reg (temp);
	}
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if ((want_value & 1) == 0
	  && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      (lang_hooks.types.signed_or_unsigned_type
	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

	  exp = convert (lang_hooks.types.type_for_mode
			 (GET_MODE (SUBREG_REG (target)),
			  SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a MEM and we want a result value, make the access
	 now so it gets done only once.  Strictly speaking, this is
	 only necessary if the MEM is volatile, or if the address
	 overlaps TARGET.  But not performing the load twice also
	 reduces the amount of rtl we generate and then have to CSE.  */
      if (MEM_P (temp) && (want_value & 1) != 0)
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
	 target.  Otherwise, the caller might get confused by a result whose
	 mode is larger than expected.  */
      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
	{
	  if (GET_MODE (temp) != VOIDmode)
	    {
	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp,
					    SUBREG_PROMOTED_UNSIGNED_P (target));
	    }
	  else
	    temp = convert_modes (GET_MODE (target),
				  GET_MODE (SUBREG_REG (target)),
				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      return want_value & 1 ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
			       (want_value & 2
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && REG_P (target)
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || (want_value & 1) != 0))
	dont_return_target = 1;
    }
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
	 unless necessary, because some front ends' (C++) expr_size hook
	 aborts on objects that are not supposed to be bit-copied or
	 bit-compared.  */
      && expr_size (exp) != const0_rtx)
    {
      emit_insns_enqueued_after_mark (mark);
      target = protect_from_queue (target, 1);
      temp = protect_from_queue (temp, 0);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (want_value & 2
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (want_value & 2
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (want_value & 2
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, TREE_TYPE (exp),
			 int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (want_value & 2
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  /* If we don't want a value, return NULL_RTX.  */
  if ((want_value & 1) == 0)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && !MEM_P (temp))
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if ((want_value & 1) != 0
	   && GET_MODE (target) != BLKmode
	   && ! (REG_P (target)
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
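
/* Illustrative note, not part of the original source: WANT_VALUE is a
   bit mask.  A caller storing into a stack call parameter that also
   needs the value back would pass both bits, e.g.

     rtx v = store_expr (arg_tree, parm_rtx, 1 | 2);

   arg_tree and parm_rtx are placeholder names; bit 1 requests the value
   and bit 2 flags the store as a call-parameter store.  */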
/* Examine CTOR.  Discover how many scalar fields are set to nonzero
   values and place it in *P_NZ_ELTS.  Discover how many scalar fields
   are set to non-constant values and place it in *P_NC_ELTS.  */

static void
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_nc_elts)
{
  HOST_WIDE_INT nz_elts, nc_elts;
  tree list;

  nz_elts = 0;
  nc_elts = 0;

  for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
    {
      tree value = TREE_VALUE (list);
      tree purpose = TREE_PURPOSE (list);
      HOST_WIDE_INT mult;

      mult = 1;
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
	    mult = (tree_low_cst (hi_index, 1)
		    - tree_low_cst (lo_index, 1) + 1);
	}

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, nc = 0;
	    categorize_ctor_elements_1 (value, &nz, &nc);
	    nz_elts += mult * nz;
	    nc_elts += mult * nc;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  break;

	case VECTOR_CST:
	  {
	    tree v;
	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
	      if (!initializer_zerop (TREE_VALUE (v)))
		nz_elts += mult;
	  }
	  break;

	default:
	  nz_elts += mult;
	  if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
	    nc_elts += mult;
	  break;
	}
    }

  *p_nz_elts += nz_elts;
  *p_nc_elts += nc_elts;
}

void
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_nc_elts)
{
  *p_nz_elts = 0;
  *p_nc_elts = 0;
  categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
}
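
/* Illustrative example, not part of the original source: for the C
   initializer { 1, 0, n } where n is a variable, the 1 counts as a
   nonzero scalar, the 0 counts as zero, and n counts as both nonzero
   and non-constant, so this reports *P_NZ_ELTS == 2 and
   *P_NC_ELTS == 1.  */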
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  */

HOST_WIDE_INT
count_type_elements (tree type)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree telts = array_type_nelts (type);
	if (telts && host_integerp (telts, 1))
	  {
	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
	    if (n == 0 || m <= 0 || n > max / m)
	      return -1;
	    return n * m;
	  }
	return -1;
      }

    case RECORD_TYPE:
      {
	HOST_WIDE_INT n = 0, t;
	tree f;

	for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      t = count_type_elements (TREE_TYPE (f));
	      if (t < 0)
		return -1;
	      n += t;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	/* Ho hum.  How in the world do we guess here?  Clearly it isn't
	   right to count the fields.  Guess based on the number of words.  */
	HOST_WIDE_INT n = int_size_in_bytes (type);
	if (n < 0)
	  return -1;
	return n / UNITS_PER_WORD;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      /* ??? This is broke.  We should encode the vector width in the tree.  */
      return GET_MODE_NUNITS (TYPE_MODE (type));

    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      abort ();
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, elts;

      /* If there are no ranges of true bits, it is all zero.  */
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts);
      elts = count_type_elements (TREE_TYPE (exp));

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}
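
/* Illustrative arithmetic, not part of the original source: a
   constructor for a struct of 8 scalar fields with one nonzero
   initializer gives nz_elts == 1 and elts == 8, and 1 < 8 / 4 holds,
   so the constructor is considered mostly zeros and the caller will
   clear the whole object before storing the single nonzero field.  */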
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		 alias_set);
}
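
/* Illustrative note, not part of the original source: a nested
   constructor at bitpos 64 with bitsize 128 in a MEM target satisfies
   the byte-boundary checks above and recurses into store_constructor
   directly; the same constructor at bitpos 4 would instead go through
   store_field and its bit-field machinery.  */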
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* If size is zero or the target is already cleared, do nothing.  */
      if (size == 0 || cleared)
	cleared = 1;
      /* We either clear the aggregate or indicate the value is dead.  */
      else if ((TREE_CODE (type) == UNION_TYPE
		|| TREE_CODE (type) == QUAL_UNION_TYPE)
	       && ! CONSTRUCTOR_ELTS (exp))
	/* If the constructor is empty, clear the union.  */
	{
	  clear_storage (target, expr_size (exp));
	  cleared = 1;
	}

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (REG_P (target) && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}
      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  Don't do this if TARGET is a
	 register whose mode size isn't equal to SIZE since clear_storage
	 can't handle this case.  */
      else if (size > 0
	       && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
		   || mostly_zeros_p (exp))
	       && (!REG_P (target)
		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
	  rtx xtarget = target;

	  if (readonly_fields_p (type))
	    {
	      xtarget = copy_rtx (xtarget);
	      RTX_UNCHANGING_P (xtarget) = 1;
	    }

	  clear_storage (xtarget, GEN_INT (size));
	  cleared = 1;
	}

      if (! cleared)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  tree field = TREE_PURPOSE (elt);
	  tree value = TREE_VALUE (elt);
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos = 0;
	  tree offset;
	  rtx to_rtx = target;
	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && initializer_zerop (value))
	    continue;

	  if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;

	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  offset = DECL_FIELD_OFFSET (field);
	  if (host_integerp (offset, 0)
	      && host_integerp (bit_position (field), 0))
	    {
	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	  if (offset)
	    {
	      rtx offset_rtx;

	      offset
		= SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						  make_tree (TREE_TYPE (exp),
							     target));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (!MEM_P (to_rtx))
		abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (offset_rtx) != Pmode)
		offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	      if (GET_MODE (offset_rtx) != ptr_mode)
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	      to_rtx = offset_address (to_rtx, offset_rtx,
				       highest_pow2_factor (offset));
	    }

	  if (TREE_READONLY (field))
	    {
	      if (MEM_P (to_rtx))
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }
#ifdef WORD_REGISTER_OPERATIONS
	  /* If this initializes a field that is smaller than a word, at the
	     start of a word, try to widen it to a full word.
	     This special case allows us to output C++ member function
	     initializations in a form that the optimizers can understand.  */
	  if (REG_P (target)
	      && bitsize < BITS_PER_WORD
	      && bitpos % BITS_PER_WORD == 0
	      && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);

	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
		  type = lang_hooks.types.type_for_size
		    (BITS_PER_WORD, TYPE_UNSIGNED (type));
		  value = convert (type, value);
		}

	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif

	  if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
	      && DECL_NONADDRESSABLE_P (field))
	    {
	      to_rtx = copy_rtx (to_rtx);
	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
				   value, type, cleared,
				   get_alias_set (TREE_TYPE (field)));
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE
	   || TREE_CODE (type) == VECTOR_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain;
      tree elttype = TREE_TYPE (type);
      int const_bounds_p;
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;
      int icode = 0;
      rtx *vector = NULL;
      unsigned n_elts = 0;

      if (TREE_CODE (type) == ARRAY_TYPE)
	domain = TYPE_DOMAIN (type);
      else
	/* Vectors do not have domains; look up the domain of
	   the array embedded in the debug representation type.
	   FIXME Would probably be more efficient to treat vectors
	   separately from arrays.  */
	{
	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
	  if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	    {
	      enum machine_mode mode = GET_MODE (target);

	      icode = (int) vec_init_optab->handlers[mode].insn_code;
	      if (icode != CODE_FOR_nothing)
		{
		  unsigned int i;
		  unsigned int elt_size;

		  elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
		  n_elts = (GET_MODE_SIZE (mode) / elt_size);
		  vector = alloca (n_elts * sizeof (rtx));
		  for (i = 0; i < n_elts; i++)
		    vector[i] = CONST0_RTX (GET_MODE_INNER (mode));
		}
	    }
	}

      const_bounds_p = (TYPE_MIN_VALUE (domain)
			&& TYPE_MAX_VALUE (domain)
			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
			&& host_integerp (TYPE_MAX_VALUE (domain), 0));

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
	{
	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	}
      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is
	 static constructor of a non-BLKmode object.  */
      if (cleared || (REG_P (target) && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = ! const_bounds_p;

	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE && ! need_to_clear;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;

	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);

		  if (! host_integerp (lo_index, 1)
		      || ! host_integerp (hi_index, 1))
		    {
		      need_to_clear = 1;
		      break;
		    }

		  this_node_count = (tree_low_cst (hi_index, 1)
				     - tree_low_cst (lo_index, 1) + 1);
		}
	      else
		this_node_count = 1;

	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }

	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
	}

      if (need_to_clear && size > 0 && !vector)
	{
	  if (! cleared)
	    {
	      if (REG_P (target))
		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	      else
		clear_storage (target, GEN_INT (size));
	    }
	  cleared = 1;
	}
      else if (REG_P (target))
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
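
      /* Illustrative arithmetic, not part of the original source: with
	 16 counted elements of which 12 are mostly zero,
	 4 * zero_count == 48 and 3 * count == 48, so the >= 75% test
	 above succeeds and the whole array is cleared before the
	 remaining elements are stored.  */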
      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && initializer_zerop (value))
	    continue;

	  unsignedp = TYPE_UNSIGNED (elttype);
	  mode = TYPE_MODE (elttype);
	  if (mode == BLKmode)
	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
		       : -1);
	  else
	    bitsize = GET_MODE_BITSIZE (mode);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;
	      /* If the range is constant and "small", unroll the loop.  */
	      if (const_bounds_p
		  && host_integerp (lo_index, 0)
		  && host_integerp (hi_index, 0)
		  && (lo = tree_low_cst (lo_index, 0),
		      hi = tree_low_cst (hi_index, 0),
		      count = hi - lo + 1,
		      (!MEM_P (target)
		       || count <= 2
		       || (host_integerp (TYPE_SIZE (elttype), 1)
			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
			       <= 40 * 8)))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

		      if (MEM_P (target)
			  && !MEM_KEEP_ALIAS_SET_P (target)
			  && TREE_CODE (type) == ARRAY_TYPE
			  && TYPE_NONALIASED_COMPONENT (type))
			{
			  target = copy_rtx (target);
			  MEM_KEEP_ALIAS_SET_P (target) = 1;
			}

		      store_constructor_field
			(target, bitsize, bitpos, mode, value, type, cleared,
			 get_alias_set (elttype));
		    }
		}
	      else
		{
		  rtx loop_start = gen_label_rtx ();
		  rtx loop_end = gen_label_rtx ();
		  tree exit_cond;

		  expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  unsignedp = TYPE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));
		  SET_DECL_RTL (index, index_r);
		  store_expr (lo_index, index_r, 0);

		  /* Build the head of the loop.  */
		  do_pending_stack_adjust ();
		  emit_label (loop_start);

		  /* Assign value to element index.  */
		  position
		    = convert (ssizetype,
			       fold (build (MINUS_EXPR, TREE_TYPE (index),
					    index, TYPE_MIN_VALUE (domain))));
		  position = size_binop (MULT_EXPR, position,
					 convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  xtarget = offset_address (target, pos_rtx,
					    highest_pow2_factor (position));
		  xtarget = adjust_address (xtarget, mode, 0);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared,
				       bitsize / BITS_PER_UNIT);
		  else
		    store_expr (value, xtarget, 0);

		  /* Generate a conditional jump to exit the loop.  */
		  exit_cond = build (LT_EXPR, integer_type_node,
				     index, hi_index);
		  jumpif (exit_cond, loop_end);

		  /* Update the loop counter, and jump to the head of
		     the loop.  */
		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  emit_jump (loop_start);

		  /* Build the end of the loop.  */
		  emit_label (loop_end);
		}
	    }
	  else if ((index != 0 && ! host_integerp (index, 0))
		   || ! host_integerp (TYPE_SIZE (elttype), 1))
	    {
	      tree position;

	      if (index == 0)
		index = ssize_int (1);

	      if (minelt)
		index = convert (ssizetype,
				 fold (build (MINUS_EXPR, index,
					      TYPE_MIN_VALUE (domain))));

	      position = size_binop (MULT_EXPR, index,
				     convert (ssizetype,
					      TYPE_SIZE_UNIT (elttype)));
	      xtarget = offset_address (target,
					expand_expr (position, 0, VOIDmode, 0),
					highest_pow2_factor (position));
	      xtarget = adjust_address (xtarget, mode, 0);
	      store_expr (value, xtarget, 0);
	    }
	  else if (vector)
	    {
	      int pos;

	      if (index != 0)
		pos = tree_low_cst (index, 0) - minelt;
	      else
		pos = i;
	      vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((tree_low_cst (index, 0) - minelt)
			  * tree_low_cst (TYPE_SIZE (elttype), 1));
	      else
		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

	      if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		  && TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
		{
		  target = copy_rtx (target);
		  MEM_KEEP_ALIAS_SET_P (target) = 1;
		}
	      store_constructor_field (target, bitsize, bitpos, mode, value,
				       type, cleared, get_alias_set (elttype));
	    }
	}

      if (vector)
	emit_insn (GEN_FCN (icode)
		   (target,
		    gen_rtx_PARALLEL (GET_MODE (target),
				      gen_rtvec_v (n_elts, vector))));
    }
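
  /* Illustrative example, not part of the original source: a GNU C range
     initializer such as

       int v[100] = { [10 ... 89] = 7 };

     reaches the RANGE_EXPR handling above.  With an int element the
     unrolling limit of 40 * 8 bits is exceeded (32 * 80 bits), so the
     runtime loop is emitted instead of eighty individual stores.  */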
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset), and then
	 set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
	{
	  if (!cleared)
	    clear_storage (target, GEN_INT (size));
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_diffop (domain_max, domain_min),
			      ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  unsigned int bit_pos = 0;
	  unsigned int ibit = 0;
	  unsigned int offset = 0;  /* In bytes from beginning of set.  */

	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}

	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = gen_int_mode (word, mode);
		      rtx to_rtx;

		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (MEM_P (target))
			to_rtx = adjust_address (target, mode, offset);
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
	{
	  /* Don't bother clearing storage if the set is all ones.  */
	  if (TREE_CHAIN (elt) != NULL_TREE
	      || (TREE_PURPOSE (elt) == NULL_TREE
		  ? nbits != 1
		  : ( ! host_integerp (TREE_VALUE (elt), 0)
		     || ! host_integerp (TREE_PURPOSE (elt), 0)
		     || (tree_low_cst (TREE_VALUE (elt), 0)
			 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
			 != (HOST_WIDE_INT) nbits))))
	    clear_storage (target, expr_size (exp));
	}

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* Start of range of element or NULL.  */
	  tree startbit = TREE_PURPOSE (elt);
	  /* End of range of element, or element value.  */
	  tree endbit = TREE_VALUE (elt);
	  HOST_WIDE_INT startb, endb;
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* Handle non-range tuple element like [ expr ].  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }

	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx
		= assign_temp
		  ((build_qualified_type (lang_hooks.types.type_for_mode
					  (GET_MODE (target), 0),
					  TYPE_QUAL_CONST)),
		   0, 1, 1);
	      emit_move_insn (targetx, target);
	    }
	  else if (MEM_P (target))
	    targetx = target;
	  else
	    abort ();

	  /* Optimization:  If startbit and endbit are constants divisible
	     by BITS_PER_UNIT, call memset instead.  */
	  if (TARGET_MEM_FUNCTIONS
	      && TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    emit_library_call (memset_libfunc, LCT_NORMAL,
			       VOIDmode, 3,
			       plus_constant (XEXP (targetx, 0),
					      startb / BITS_PER_UNIT),
			       Pmode,
			       constm1_rtx, TYPE_MODE (integer_type_node),
			       GEN_INT ((endb - startb) / BITS_PER_UNIT),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (setbits_libfunc, LCT_NORMAL,
			       VOIDmode, 4, XEXP (targetx, 0),
			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
			       startbit_rtx, TYPE_MODE (sizetype),
			       endbit_rtx, TYPE_MODE (sizetype));

	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
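
/* Illustrative note, not part of the original source: SET_TYPE
   constructors come from front ends with Pascal-style sets.  A small
   constant set such as [3, 5..12] takes the constant-copy path above:
   get_set_constructor_bits flattens it into BIT_BUFFER and the
   resulting words are stored with ordinary move insns.  */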
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     enum machine_mode mode, tree exp, enum machine_mode value_mode,
	     int unsignedp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */
      if (bitpos != 0)
	abort ();
      return store_expr (exp, target, value_mode != VOIDmode);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (!MEM_P (target) || !MEM_P (temp)
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp,
		       int_size_in_bytes (type));

      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.
	     If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;

	      if (unsignedp)
		return expand_and (tmode, temp,
				   gen_int_mode (width_mask, tmode),
				   NULL_RTX);

	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, VOIDmode,
				    int_size_in_bytes (type));
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && !REG_P (addr)
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));

      /* Now build a reference to just the desired component.  */

      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
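
/* Illustrative example, not part of the original source: for a C
   bit-field store s.f = x where f occupies 3 bits starting at bit 5,
   store_field is entered with bitsize == 3, bitpos == 5 and
   mode == VOIDmode, so the value goes through store_bit_field; a
   naturally aligned int member instead takes the adjust_address path
   and is stored with a plain move.  */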
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (exp, 1);
	  tree this_offset = component_ref_field_offset (exp);

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (this_offset == 0)
	    break;

	  offset = size_binop (PLUS_EXPR, offset, this_offset);
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   DECL_FIELD_BIT_OFFSET (field));

	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	}

      else if (TREE_CODE (exp) == ARRAY_REF
	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
	{
	  tree index = TREE_OPERAND (exp, 1);
	  tree low_bound = array_ref_low_bound (exp);
	  tree unit_size = array_ref_element_size (exp);

	  /* We assume all arrays have sizes that are a multiple of a byte.
	     First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     array element.  */
	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				 index, low_bound));

	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (MULT_EXPR,
					   convert (sizetype, index),
					   unit_size));
	}

      /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
	 conversions that don't change the mode, and all view conversions
	 except those that need to "step up" the alignment.  */
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
		     && ! ((TYPE_ALIGN (TREE_TYPE (exp))
			    > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
			   && STRICT_ALIGNMENT
			   && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       < BIGGEST_ALIGNMENT)
			   && (TYPE_ALIGN_OK (TREE_TYPE (exp))
			       || TYPE_ALIGN_OK (TREE_TYPE
						 (TREE_OPERAND (exp, 0))))))
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    return size_binop (MULT_EXPR, aligned_size,
		       size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    return size_binop (MULT_EXPR, aligned_offset,
		       size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
5799 handled_component_p (tree t
)
5801 switch (TREE_CODE (t
))
5806 case ARRAY_RANGE_REF
:
5807 case NON_LVALUE_EXPR
:
5808 case VIEW_CONVERT_EXPR
:
5811 /* ??? Sure they are handled, but get_inner_reference may return
5812 a different PBITSIZE, depending upon whether the expression is
5813 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5816 return (TYPE_MODE (TREE_TYPE (t
))
5817 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
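
/* Illustrative sketch, not part of the original source: a typical
   caller pattern for get_inner_reference, with placeholder names.

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep);

   For a COMPONENT_REF s.f whose field sits 32 bits into s, this sets
   bitpos to 32, offset to 0, and returns the reference to s.  */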
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value = simplify_gen_subreg (GET_MODE (value),
				   force_reg (GET_MODE (SUBREG_REG (value)),
					      force_operand (SUBREG_REG (value),
							     NULL_RTX)),
				   GET_MODE (SUBREG_REG (value)),
				   SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
		    code == ZERO_EXTEND);
      return target;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }

  if (UNARY_P (value))
    {
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references
     to be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
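
/* Illustrative sketch, not part of the original source: handing the rtx
   (plus (reg A) (mult (reg B) (const_int 4))) to force_operand emits a
   multiply and an add and returns a pseudo holding the sum, e.g.

     rtx tmp = force_operand (addr_rtx, NULL_RTX);   (addr_rtx is a placeholder)
     emit_move_insn (dest_reg, tmp);

   so callers can pass arbitrary address arithmetic to code that only
   accepts general operands.  */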
5968 /* Subroutine of expand_expr: return nonzero iff there is no way that
5969 EXP can reference X, which is being modified. TOP_P is nonzero if this
5970 call is going to be used to determine whether we need a temporary
5971 for EXP, as opposed to a recursive call to this function.
5973 It is always safe for this routine to return zero since it merely
5974 searches for optimization opportunities. */
5977 safe_from_p (rtx x
, tree exp
, int top_p
)
5983 /* If EXP has varying size, we MUST use a target since we currently
5984 have no way of allocating temporaries of variable size
5985 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5986 So we assume here that something at a higher level has prevented a
5987 clash. This is somewhat bogus, but the best we can do. Only
5988 do this when X is BLKmode and when we are at the top level. */
5989 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5990 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5991 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5992 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5993 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5995 && GET_MODE (x
) == BLKmode
)
5996 /* If X is in the outgoing argument area, it is always safe. */
5998 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5999 || (GET_CODE (XEXP (x
, 0)) == PLUS
6000 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
6003 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6004 find the underlying pseudo. */
6005 if (GET_CODE (x
) == SUBREG
)
6008 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR?  */
      else
	return 0;

    case 's':
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case '2':
    case '<':
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;

      /* Fall through.  */

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL and that address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case INDIRECT_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	default:
	  break;
	}
      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !lang_hooks.safe_from_p (x, exp))
	return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
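
/* Illustrative note (not from the original source): a typical caller
   pattern is

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   i.e. discard TARGET as a scratch location when EXP1 might reference
   it.  Because zero is always a safe answer, a false negative only
   costs an extra temporary, never correctness.  */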
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (tree exp)
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
  else
    target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
  return MAX (factor, target_align);
}
/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
	{
	  /* Prepare a mem & address for the decl.  */
	  rtx x;

	  if (TREE_STATIC (var))
	    abort ();

	  x = gen_rtx_MEM (DECL_MODE (var),
			   gen_reg_rtx (Pmode));

	  set_mem_attributes (x, var, 1);
	  SET_DECL_RTL (var, x);
	}
      else if (lang_hooks.expand_decl (var))
	/* OK.  */;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
	expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
	rest_of_decl_compilation (var, NULL, 0, 0);
      else if (TREE_CODE (var) == TYPE_DECL
	       || TREE_CODE (var) == CONST_DECL
	       || TREE_CODE (var) == FUNCTION_DECL
	       || TREE_CODE (var) == LABEL_DECL)
	/* No expansion needed.  */;
      else
	abort ();
    }
}
/* Expands declarations of variables in list VARS.  */

static void
expand_vars (tree vars)
{
  for (; vars; vars = TREE_CHAIN (vars))
    {
      tree var = vars;

      if (DECL_EXTERNAL (var))
	continue;

      expand_var (var);
      expand_decl_init (var);
    }
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural
   mode.  TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend
   insns.  This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
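
/* Illustrative example (not from the original source): expanding the C
   expression p + i * 4 with MODIFIER == EXPAND_SUM may return the bare
   address arithmetic

     (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))

   for a later memory_address call to digest, whereas EXPAND_NORMAL
   would emit the mul/add insns and return a pseudo holding the sum.
   Register numbers are invented for the example.  */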
static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx ret, last = NULL;
  int rn = -1;
  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;

      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						REG_NOTES (insn));
	}
    }

  return ret;
}
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      abort ();
    }
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }

    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
	{
	  error ("%Jprior parameter's size depends on '%D'", exp, exp);
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      if (context != 0 && context != current_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (MEM_P (DECL_RTL (exp))
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  lang_hooks.mark_addressable (exp);
	  if (!MEM_P (DECL_RTL (exp)))
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (MEM_P (addr))
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (MEM_P (DECL_RTL (exp))
	       && REG_P (XEXP (DECL_RTL (exp), 0)))
	temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (MEM_P (DECL_RTL (exp))
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && !REG_P (XEXP (DECL_RTL (exp), 0)))))
	{
	  if (alt_rtl)
	    *alt_rtl = DECL_RTL (exp);
	  temp = replace_equiv_address (DECL_RTL (exp),
					copy_rtx (XEXP (DECL_RTL (exp), 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (REG_P (DECL_RTL (exp))
	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      return const_vector_from_tree (exp);
    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = output_constant_def (exp, 1);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (temp, 0))
	      || flag_force_addr))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
    case SAVE_EXPR:
      {
	tree val = TREE_OPERAND (exp, 0);
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

	if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    if (GET_MODE (ret) == BLKmode)
	      abort ();

	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    TREE_OPERAND (exp, 0) = val;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6820 TREE_OPERAND (exp
, 0)
6821 = lang_hooks
.unsave_expr_now (TREE_OPERAND (exp
, 0));
6826 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6827 expand_goto (TREE_OPERAND (exp
, 0));
6829 expand_computed_goto (TREE_OPERAND (exp
, 0));
      /* These are lowered during gimplification, so we should never ever
	 see them here.  */
    case LOOP_EXPR:
    case EXIT_EXPR:
      abort ();

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;
    case BIND_EXPR:
      {
	tree block = BIND_EXPR_BLOCK (exp);
	int mark_ends;

	/* If we're in functions-as-trees mode, this BIND_EXPR represents
	   the block, so we need to emit NOTE_INSN_BLOCK_* notes.  */
	mark_ends = (block != NULL_TREE);
	expand_start_bindings_and_block (mark_ends ? 0 : 2, block);

	/* If VARS have not yet been expanded, expand them now.  */
	expand_vars (BIND_EXPR_VARS (exp));

	/* TARGET was clobbered early in this function.  The correct
	   indicator of whether or not we need the value of this
	   expression is the IGNORE variable.  */
	temp = expand_expr (BIND_EXPR_BODY (exp),
			    ignore ? const0_rtx : target,
			    VOIDmode, modifier);

	expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);

	return temp;
      }
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || ((modifier == EXPAND_INITIALIZER
		    || modifier == EXPAND_CONST_ADDRESS)
		   && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);

	if (modifier != EXPAND_WRITE)
	  {
	    tree t;

	    t = fold_read_from_constant_string (exp);
	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }

    case ARRAY_REF:

#ifdef ENABLE_CHECKING
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();
#endif

      {
	tree array = TREE_OPERAND (exp, 0);
	tree low_bound = array_ref_low_bound (exp);
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));
	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */
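
	/* (Illustrative aside, not in the original source: "foo"[2]
	   would fold straight to the character constant 'o'.)  */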
	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }
	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;
	    unsigned HOST_WIDE_INT i;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }
	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
		 && targetm.binds_local_p (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */

	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);
	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	/* Otherwise, if this object is not in memory and we either have an
	   offset or a BLKmode result, put it there.  This case can't occur in
	   C, but can in Ada if we have unchecked conversion of an expression
	   from a scalar type to an array or record type or for an
	   ARRAY_RANGE_REF whose type is BLKmode.  */
	else if (!MEM_P (op0)
		 && (offset != 0
		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
	  {
	    tree nt = build_qualified_type (TREE_TYPE (tem),
					    (TYPE_QUALS (TREE_TYPE (tem))
					     | TYPE_QUAL_CONST));
	    rtx memloc = assign_temp (nt, 1, 1, 1);

	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	  }
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    if (!MEM_P (op0))
	      abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    if (GET_MODE (op0) == BLKmode
		/* A constant address in OP0 can have VOIDmode, we must
		   not try to call force_reg in that case.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one element arrays having the same mode as its element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
= mode
;
7297 if (ext_mode
== BLKmode
7298 && ! (target
!= 0 && MEM_P (op0
)
7300 && bitpos
% BITS_PER_UNIT
== 0))
7301 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7303 if (ext_mode
== BLKmode
)
7306 target
= assign_temp (type
, 0, 1, 1);
7311 /* In this case, BITPOS must start at a byte boundary and
7312 TARGET, if specified, must be a MEM. */
7314 || (target
!= 0 && !MEM_P (target
))
7315 || bitpos
% BITS_PER_UNIT
!= 0)
7318 emit_block_move (target
,
7319 adjust_address (op0
, VOIDmode
,
7320 bitpos
/ BITS_PER_UNIT
),
7321 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7323 (modifier
== EXPAND_STACK_PARM
7324 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7329 op0
= validize_mem (op0
);
7331 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
7332 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7334 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7335 (modifier
== EXPAND_STACK_PARM
7336 ? NULL_RTX
: target
),
7338 int_size_in_bytes (TREE_TYPE (tem
)));
7340 /* If the result is a record type and BITSIZE is narrower than
7341 the mode of OP0, an integral mode, and this is a big endian
7342 machine, we must put the field into the high-order bits. */
7343 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7344 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7345 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7346 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7347 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7351 /* If the result type is BLKmode, store the data into a temporary
7352 of the appropriate type, but with the mode corresponding to the
7353 mode for the data we have (op0's mode). It's tempting to make
7354 this a constant type, since we know it's only being stored once,
7355 but that can cause problems if we are taking the address of this
7356 COMPONENT_REF because the MEM of any reference via that address
7357 will have flags corresponding to the type, which will not
7358 necessarily be constant. */
7359 if (mode
== BLKmode
)
7362 = assign_stack_temp_for_type
7363 (ext_mode
, GET_MODE_BITSIZE (ext_mode
), 0, type
);
7365 emit_move_insn (new, op0
);
7366 op0
= copy_rtx (new);
7367 PUT_MODE (op0
, BLKmode
);
7368 set_mem_attributes (op0
, exp
, 1);
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low % bits_per_word);
	       the_word  = set [ (index - rlo) / bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;
	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);
	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);
	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }

    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
				  CLEANUP_EH_ONLY (exp));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);
	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return lang_hooks.expand_expr (exp, original_target,
					   tmode, modifier, alt_rtl);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }
	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM ? 2 : 0);

	  else if (REG_P (target))
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, type, 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7618 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7621 /* If the signedness of the conversion differs and OP0 is
7622 a promoted SUBREG, clear that indication since we now
7623 have to do the proper extension. */
7624 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7625 && GET_CODE (op0
) == SUBREG
)
7626 SUBREG_PROMOTED_VAR_P (op0
) = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TYPE_UNSIGNED (inner_type));
	}

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are integral and within
	 a word, we can use gen_lowpart.  If neither is true, make sure the
	 operand is in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
	       && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}
      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      if (TREE_ADDRESSABLE (exp))
		abort ();

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;
    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
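
      /* Illustrative example (not in the original source): for
	 P = &arr[10] with 4-byte elements, plus_constant can fold the
	 address to roughly

	   (const (plus (symbol_ref "arr") (const_int 40)))

	 instead of emitting an explicit add insn; force_operand is the
	 fallback when the target cannot accept such a sum directly.  */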
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
, 1)) == INTEGER_CST
7803 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7804 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7808 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7809 (modifier
== EXPAND_INITIALIZER
7810 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7811 if (! CONSTANT_P (op0
))
7813 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7814 VOIDmode
, modifier
);
7815 /* Return a PLUS if modifier says it's OK. */
7816 if (modifier
== EXPAND_SUM
7817 || modifier
== EXPAND_INITIALIZER
)
7818 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7821 /* Use immed_double_const to ensure that the constant is
7822 truncated according to the mode of OP1, then sign extended
7823 to a HOST_WIDE_INT. Using the constant directly can result
7824 in non-canonical RTL in a 64x32 cross compile. */
7826 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7828 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7829 op0
= plus_constant (op0
, INTVAL (constant_part
));
7830 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7831 op0
= force_operand (op0
, target
);
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);
      return simplify_gen_binary (PLUS, mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}

      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op1);
	}

      goto binop2;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE
				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_PRECISION (TREE_TYPE
				      (TREE_OPERAND
				       (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TYPE_UNSIGNED (TREE_TYPE
				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_UNSIGNED (TREE_TYPE
				     (TREE_OPERAND
				      (TREE_OPERAND (exp, 0), 0)))))))
7966 tree op0type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0));
7967 enum machine_mode innermode
= TYPE_MODE (op0type
);
7968 bool zextend_p
= TYPE_UNSIGNED (op0type
);
7969 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
7970 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
7972 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7974 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7976 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7977 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7978 TREE_OPERAND (exp
, 1),
7979 NULL_RTX
, &op0
, &op1
, 0);
7981 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7982 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7983 NULL_RTX
, &op0
, &op1
, 0);
7986 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7987 && innermode
== word_mode
)
7990 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7991 NULL_RTX
, VOIDmode
, 0);
7992 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7993 op1
= convert_modes (innermode
, mode
,
7994 expand_expr (TREE_OPERAND (exp
, 1),
7995 NULL_RTX
, VOIDmode
, 0),
7998 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7999 NULL_RTX
, VOIDmode
, 0);
8000 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8001 unsignedp
, OPTAB_LIB_WIDEN
);
8002 hipart
= gen_highpart (innermode
, temp
);
8003 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
8007 emit_move_insn (hipart
, htem
);
8012 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8013 subtarget
, &op0
, &op1
, 0);
8014 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
         then, if the divisor is constant, we can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
         saving an expensive divide.  If not, combine will rebuild the
         original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
          && TREE_CODE (type) == REAL_TYPE
          && !real_onep (TREE_OPERAND (exp, 0)))
        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
                                   build (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          TREE_OPERAND (exp, 1))),
                            target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;
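      /* Illustrative example (editorial): with -funsafe-math-optimizations,
         `x / y' becomes `x * (1.0 / y)'; if several divisions share the
         divisor `y', CSE can then compute `1.0 / y' just once.  */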
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();                        /* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode,
                          ! unsignedp && flag_trapv
                          && (GET_MODE_CLASS (mode) == MODE_INT)
                          ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        abort ();

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
          || modifier == EXPAND_STACK_PARM
          || (MEM_P (target) && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (REG_P (target)
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (unsignedp
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */
      if (MEM_P (target))
        target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
                                          NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
                                          NULL_RTX, op0);
        }
      else
        do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
                                 unsignedp, mode, NULL_RTX, NULL_RTX, op0);

      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
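      /* Illustrative example (editorial): for `f () && g ()' the front end
         uses TRUTH_ANDIF_EXPR, so `g ()' is skipped when `f ()' is false;
         TRUTH_AND_EXPR evaluates both operands to 0-or-1 values and ANDs
         them, which avoids a branch when the operands have no side
         effects.  */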
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp,
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                            tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && REG_P (original_target)
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          /* If temp is constant, we can just compute the result.  */
          if (GET_CODE (temp) == CONST_INT)
            {
              if (INTVAL (temp) != 0)
                emit_move_insn (target, const1_rtx);
              else
                emit_move_insn (target, const0_rtx);

              return target;
            }

          if (temp != original_target)
            {
              enum machine_mode mode1 = GET_MODE (temp);
              if (mode1 == VOIDmode)
                mode1 = tmode != VOIDmode ? tmode : mode;

              temp = copy_to_mode_reg (mode1, temp);
            }

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */
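      /* Illustrative example (editorial): for `r = (x != 0)' with no usable
         scc insn, the code above loads `x' into the target register, jumps
         past a `target = 1' store when the value is zero, and lets CSE merge
         the two references.  */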
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0
              || modifier == EXPAND_STACK_PARM
              || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && REG_P (target)
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr_real (TREE_OPERAND (exp, 1),
                               (ignore ? const0_rtx : target),
                               VOIDmode, modifier, alt_rtl);
    case STATEMENT_LIST:
      {
        tree_stmt_iterator iter;

        if (!ignore)
          abort ();

        for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
          expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;
    case COND_EXPR:
      /* If it's void, we don't need to worry about computing a value.  */
      if (VOID_TYPE_P (TREE_TYPE (exp)))
        {
          tree pred = TREE_OPERAND (exp, 0);
          tree then_ = TREE_OPERAND (exp, 1);
          tree else_ = TREE_OPERAND (exp, 2);

          /* If we do not have any pending cleanups or stack_levels
             to restore, and at least one arm of the COND_EXPR is a
             GOTO_EXPR to a local label, then we can emit more efficient
             code by using jumpif/jumpifnot instead of the 'if' machinery.  */
          if (! optimize
              || containing_blocks_have_cleanups_or_stack_level ())
            ;
          else if (TREE_CODE (then_) == GOTO_EXPR
                   && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
            {
              jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
              return expand_expr (else_, const0_rtx, VOIDmode, 0);
            }
          else if (TREE_CODE (else_) == GOTO_EXPR
                   && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
            {
              jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
              return expand_expr (then_, const0_rtx, VOIDmode, 0);
            }

          /* Just use the 'if' machinery.  */
          expand_start_cond (pred, 0);
          start_cleanup_deferral ();
          expand_expr (then_, const0_rtx, VOIDmode, 0);

          exp = else_;

          /* Iterate over 'else if's instead of recursing.  */
          for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
            {
              expand_start_else ();
              if (EXPR_HAS_LOCATION (exp))
                {
                  emit_line_note (EXPR_LOCATION (exp));
                  record_block_change (TREE_BLOCK (exp));
                }
              expand_elseif (TREE_OPERAND (exp, 0));
              expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
            }
          /* Don't emit the jump and label if there's no 'else' clause.  */
          if (TREE_SIDE_EFFECTS (exp))
            {
              expand_start_else ();
              expand_expr (exp, const0_rtx, VOIDmode, 0);
            }
          end_cleanup_deferral ();
          expand_end_cond ();
          return const0_rtx;
        }
      /* If we would have a "singleton" (see below) were it not for a
         conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
          && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
              == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
        {
          tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
          tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

          if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
               && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
                  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
                  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
                  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
            return expand_expr (build1 (NOP_EXPR, type,
                                        build (COND_EXPR, TREE_TYPE (iftrue),
                                               TREE_OPERAND (exp, 0),
                                               iftrue, iffalse)),
                                target, tmode, modifier);
        }
      {
        /* Note that COND_EXPRs whose type is a structure or union
           are required to be constructed to contain assignments of
           a temporary variable, so that we can evaluate them here
           for side effect only.  If type is void, we must do likewise.  */

        /* If an arm of the branch requires a cleanup,
           only that cleanup is performed.  */

        tree singleton = 0;
        tree binary_op = 0, unary_op = 0;

        /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
           convert it to our mode, if necessary.  */
        if (integer_onep (TREE_OPERAND (exp, 1))
            && integer_zerop (TREE_OPERAND (exp, 2))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            if (ignore)
              {
                expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                             modifier);
                return const0_rtx;
              }

            if (modifier == EXPAND_STACK_PARM)
              target = 0;
            op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
            if (GET_MODE (op0) == mode)
              return op0;

            if (target == 0)
              target = gen_reg_rtx (mode);
            convert_move (target, op0, unsignedp);
            return target;
          }

        /* Check for X ? A + B : A.  If we have this, we can copy A to the
           output and conditionally add B.  Similarly for unary operations.
           Don't do this if X has side-effects because those side effects
           might affect A or B and the "?" operation is a sequence point in
           ANSI.  (operand_equal_p tests for side effects.)  */

        if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
            && operand_equal_p (TREE_OPERAND (exp, 2),
                                TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 2),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */

        if (ignore)
          temp = 0;
        else if (modifier == EXPAND_STACK_PARM)
          temp = assign_temp (type, 0, 0, 1);
        else if (original_target
                 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
                     || (singleton && REG_P (original_target)
                         && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
                         && original_target == var_rtx (singleton)))
                 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
                 && (! can_conditionally_move_p (mode)
                     || REG_P (original_target)
                     || TREE_ADDRESSABLE (type))
#endif
                 && (!MEM_P (original_target)
                     || TREE_ADDRESSABLE (type)))
          temp = original_target;
        else if (TREE_ADDRESSABLE (type))
          abort ();
        else
          temp = assign_temp (type, 0, 0, 1);
        /* If we had X ? A + C : A, with C a constant power of 2, and we can
           do the test of X as a store-flag operation, do this as
           A + ((X != 0) << log C).  Similarly for other simple binary
           operators.  Only do for C == 1 if BRANCH_COST is low.  */
        if (temp && singleton && binary_op
            && (TREE_CODE (binary_op) == PLUS_EXPR
                || TREE_CODE (binary_op) == MINUS_EXPR
                || TREE_CODE (binary_op) == BIT_IOR_EXPR
                || TREE_CODE (binary_op) == BIT_XOR_EXPR)
            && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
                : integer_onep (TREE_OPERAND (binary_op, 1)))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            rtx result;
            tree cond;
            optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
                               ? addv_optab : add_optab)
                            : TREE_CODE (binary_op) == MINUS_EXPR
                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
                               ? subv_optab : sub_optab)
                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
                            : xor_optab);

            /* If we had X ? A : A + 1, do this as A + (X == 0).  */
            if (singleton == TREE_OPERAND (exp, 1))
              cond = invert_truthvalue (TREE_OPERAND (exp, 0));
            else
              cond = TREE_OPERAND (exp, 0);

            result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
                                           ? temp : NULL_RTX),
                                    mode, BRANCH_COST <= 1);

            if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
              result = expand_shift (LSHIFT_EXPR, mode, result,
                                     build_int_2 (tree_log2
                                                  (TREE_OPERAND
                                                   (binary_op, 1)),
                                                  0),
                                     (safe_from_p (temp, singleton, 1)
                                      ? temp : NULL_RTX), 0);

            if (result)
              {
                op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
                return expand_binop (mode, boptab, op1, result, temp,
                                     unsignedp, OPTAB_LIB_WIDEN);
              }
          }
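        /* Illustrative example (editorial): for `x ? a + 4 : a' with a
           cheap store-flag insn, the code above computes
           `a + ((x != 0) << 2)' and avoids a branch entirely.  */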
        do_pending_stack_adjust ();
        NO_DEFER_POP;
        op0 = gen_label_rtx ();

        if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
          {
            if (temp != 0)
              {
                /* If the target conflicts with the other operand of the
                   binary op, we can't use it.  Also, we can't use the target
                   if it is a hard register, because evaluating the condition
                   might clobber it.  */
                if ((binary_op
                     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
                    || (REG_P (temp)
                        && REGNO (temp) < FIRST_PSEUDO_REGISTER))
                  temp = gen_reg_rtx (mode);
                store_expr (singleton, temp,
                            modifier == EXPAND_STACK_PARM ? 2 : 0);
              }
            else
              expand_expr (singleton,
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            if (singleton == TREE_OPERAND (exp, 1))
              jumpif (TREE_OPERAND (exp, 0), op0);
            else
              jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            if (binary_op && temp == 0)
              /* Just touch the other operand.  */
              expand_expr (TREE_OPERAND (binary_op, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            else if (binary_op)
              store_expr (build (TREE_CODE (binary_op), type,
                                 make_tree (type, temp),
                                 TREE_OPERAND (binary_op, 1)),
                          temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              store_expr (build1 (TREE_CODE (unary_op), type,
                                  make_tree (type, temp)),
                          temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
            op1 = op0;
          }
        /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
           comparison operator.  If we have one of these cases, set the
           output to A, branch on A (cse will merge these two references),
           then set the output to FOO.  */
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1), 0)
                 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
                 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
          {
            if (REG_P (temp)
                && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 1), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);
            jumpif (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
              store_expr (TREE_OPERAND (exp, 2), temp,
                          modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              expand_expr (TREE_OPERAND (exp, 2),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            op1 = op0;
          }
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 2), 0)
                 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
                 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
          {
            if (REG_P (temp)
                && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
              store_expr (TREE_OPERAND (exp, 1), temp,
                          modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              expand_expr (TREE_OPERAND (exp, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            op1 = op0;
          }
        else
          {
            op1 = gen_label_rtx ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();

            /* One branch of the cond can be void, if it never returns.  For
               example A ? throw : E  */
            if (temp != 0
                && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
              store_expr (TREE_OPERAND (exp, 1), temp,
                          modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              expand_expr (TREE_OPERAND (exp, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            end_cleanup_deferral ();
            emit_queue ();
            emit_jump_insn (gen_jump (op1));
            emit_barrier ();
            emit_label (op0);
            start_cleanup_deferral ();
            if (temp != 0
                && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
              store_expr (TREE_OPERAND (exp, 2), temp,
                          modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              expand_expr (TREE_OPERAND (exp, 2),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
          }

        end_cleanup_deferral ();

        emit_queue ();
        emit_label (op1);
        OK_DEFER_POP;

        return temp;
      }
    case TARGET_EXPR:
      {
        /* Something needs to be initialized, but we didn't know
           where that thing was when building the tree.  For example,
           it could be the return value of a function, or a parameter
           to a function which lays down in the stack, or a temporary
           variable which must be passed by reference.

           We guarantee that the expression will either be constructed
           or copied into our original target.  */

        tree slot = TREE_OPERAND (exp, 0);
        tree cleanups = NULL_TREE;
        tree exp1;

        if (TREE_CODE (slot) != VAR_DECL)
          abort ();

        if (! ignore)
          target = original_target;

        /* Set this here so that if we get a target that refers to a
           register variable that's already been used, put_reg_into_stack
           knows that it should fix up those uses.  */
        TREE_USED (slot) = 1;

        if (target == 0)
          {
            if (DECL_RTL_SET_P (slot))
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                target = assign_temp (type, 2, 0, 1);
                SET_DECL_RTL (slot, target);
                if (TREE_ADDRESSABLE (slot))
                  put_var_into_stack (slot, /*rescan=*/false);

                /* Since SLOT is not known to the called function
                   to belong to its stack frame, we must build an explicit
                   cleanup.  This case occurs when we must build up a reference
                   to pass the reference as an argument.  In this case,
                   it is very likely that such a reference need not be
                   built here.  */

                if (TREE_OPERAND (exp, 2) == 0)
                  TREE_OPERAND (exp, 2)
                    = lang_hooks.maybe_build_cleanup (slot);
                cleanups = TREE_OPERAND (exp, 2);
              }
          }
        else
          {
            /* This case does occur, when expanding a parameter which
               needs to be constructed on the stack.  The target
               is the actual stack address that we want to initialize.
               The function we call will perform the cleanup in this case.  */

            /* If we have already assigned it space, use that space,
               not target that we were passed in, as our target
               parameter is only a hint.  */
            if (DECL_RTL_SET_P (slot))
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                SET_DECL_RTL (slot, target);
                /* If we must have an addressable slot, then make sure that
                   the RTL that we just stored in slot is OK.  */
                if (TREE_ADDRESSABLE (slot))
                  put_var_into_stack (slot, /*rescan=*/true);
              }
          }

        exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
        /* Mark it as expanded.  */
        TREE_OPERAND (exp, 1) = NULL_TREE;

        if (VOID_TYPE_P (TREE_TYPE (exp1)))
          /* If the initializer is void, just expand it; it will initialize
             the object directly.  */
          expand_expr (exp1, const0_rtx, VOIDmode, 0);
        else
          store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);

        expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

        return target;
      }
    case INIT_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);

        temp = expand_assignment (lhs, rhs, ! ignore);
        return temp;
      }
    case MODIFY_EXPR:
      {
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a
           call.  If lhs is simple, compute it first so we can give it
           as a target if the rhs is just a call.  This avoids an
           extra temp and copy and that prevents a partial-subsumption
           which makes bad code.  Actually we could treat
           component_ref's of vars like vars.  */

        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (ignore
            && TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)),
                               0);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        temp = expand_assignment (lhs, rhs, ! ignore);
        return temp;
      }
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
        expand_null_return ();
      else
        expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* If we are taking the address of something erroneous, just
         return a zero.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
        return const0_rtx;
      /* If we are taking the address of a constant and are at the
         top level, we have to use output_constant_def since we can't
         call force_const_mem at top level.  */
      if (cfun == 0
          && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
              || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
                  == 'c')))
        op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
        {
          /* We make sure to pass const0_rtx down if we came in with
             ignore set, to avoid doing the cleanups twice for something.  */
          op0 = expand_expr (TREE_OPERAND (exp, 0),
                             ignore ? const0_rtx : NULL_RTX, VOIDmode,
                             (modifier == EXPAND_INITIALIZER
                              ? modifier : EXPAND_CONST_ADDRESS));

          /* If we are going to ignore the result, OP0 will have been set
             to const0_rtx, so just return it.  Don't get confused and
             think we are taking the address of the constant.  */
          if (ignore)
            return op0;

          /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
             clever and returns a REG when given a MEM.  */
          op0 = protect_from_queue (op0, 1);

          /* We would like the object in memory.  If it is a constant, we can
             have it be statically allocated into memory.  For a non-constant,
             we need to allocate some memory and store the value into it.  */

          if (CONSTANT_P (op0))
            op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                   op0);
          else if (REG_P (op0) || GET_CODE (op0) == SUBREG
                   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
                   || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
            {
              /* If this object is in a register, it can't be BLKmode.  */
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              rtx memloc = assign_temp (inner_type, 1, 1, 1);

              if (GET_CODE (op0) == PARALLEL)
                /* Handle calls that pass values in multiple
                   non-contiguous locations.  The Irix 6 ABI has examples
                   of this.  */
                emit_group_store (memloc, op0, inner_type,
                                  int_size_in_bytes (inner_type));
              else
                emit_move_insn (memloc, op0);

              op0 = memloc;
            }

          if (!MEM_P (op0))
            abort ();

          mark_temp_addr_taken (op0);
          if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
            {
              op0 = XEXP (op0, 0);
              if (GET_MODE (op0) == Pmode && mode == ptr_mode)
                op0 = convert_memory_address (ptr_mode, op0);
              return op0;
            }

          /* If OP0 is not aligned as least as much as the type requires, we
             need to make a temporary, copy OP0 to it, and take the address of
             the temporary.  We want to use the alignment of the type, not of
             the operand.  Note that this is incorrect for FUNCTION_TYPE, but
             the test for BLKmode means that can't happen.  The test for
             BLKmode is because we never make mis-aligned MEMs with
             non-BLKmode.

             We don't need to do this at all if the machine doesn't have
             strict alignment.  */
          if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  > MEM_ALIGN (op0))
              && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              rtx new;

              if (TYPE_ALIGN_OK (inner_type))
                abort ();

              if (TREE_ADDRESSABLE (inner_type))
                {
                  /* We can't make a bitwise copy of this object, so fail.  */
                  error ("cannot take the address of an unaligned member");
                  return const0_rtx;
                }

              new = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
                 : int_size_in_bytes (inner_type),
                 1, build_qualified_type (inner_type,
                                          (TYPE_QUALS (inner_type)
                                           | TYPE_QUAL_CONST)));

              emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
                               (modifier == EXPAND_STACK_PARM
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              op0 = new;
            }

          op0 = force_operand (XEXP (op0, 0), target);
        }

      if (flag_force_addr
          && !REG_P (op0)
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM)
        op0 = force_reg (Pmode, op0);

      if (REG_P (op0)
          && ! REG_USERVAR_P (op0))
        mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
        op0 = convert_memory_address (ptr_mode, op0);

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx insns;

        /* Get the rtx code of the operands.  */
        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
        op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        start_sequence ();

        /* Move the real (op0) and imaginary (op1) parts to their location.  */
        emit_move_insn (gen_realpart (mode, target), op0);
        emit_move_insn (gen_imagpart (mode, target), op1);

        insns = get_insns ();
        end_sequence ();

        /* Complex construction should appear as a single unit.  */
        /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
        else
          emit_insn (insns);

        return target;
      }
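      /* Illustrative example (editorial): constructing a `__complex__
         double' from its parts emits two part-moves, RD = RS and ID = IS;
         when the target is a single register they are wrapped in a
         no-conflict block so later passes see one construction rather
         than two unrelated stores.  */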
    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
        enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx imag_t;
        rtx insns;

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (mode);

        start_sequence ();

        /* Store the realpart and the negated imagpart to target.  */
        emit_move_insn (gen_realpart (partmode, target),
                        gen_realpart (partmode, op0));

        imag_t = gen_imagpart (partmode, target);
        temp = expand_unop (partmode,
                            ! unsignedp && flag_trapv
                            && (GET_MODE_CLASS (partmode) == MODE_INT)
                            ? negv_optab : neg_optab,
                            gen_imagpart (partmode, op0), imag_t, 0);
        if (temp != imag_t)
          emit_move_insn (imag_t, temp);

        insns = get_insns ();
        end_sequence ();

        /* Conjugate should appear as a single unit
           If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
        else
          emit_insn (insns);

        return target;
      }
    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
      {
        tree handler = TREE_OPERAND (exp, 1);

        expand_eh_region_start ();
        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
        expand_eh_handler (handler);

        return op0;
      }

    case CATCH_EXPR:
      expand_start_catch (CATCH_TYPES (exp));
      expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
      expand_end_catch ();
      return const0_rtx;

    case EH_FILTER_EXPR:
      /* Should have been handled in expand_eh_handler.  */
      abort ();

    case TRY_FINALLY_EXPR:
      {
        tree try_block = TREE_OPERAND (exp, 0);
        tree finally_block = TREE_OPERAND (exp, 1);

        if ((!optimize && lang_protect_cleanup_actions == NULL)
            || unsafe_for_reeval (finally_block) > 1)
          {
            /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
               is not sufficient, so we cannot expand the block twice.
               So we play games with GOTO_SUBROUTINE_EXPR to let us
               expand the thing only once.  */
            /* When not optimizing, we go ahead with this form since
               (1) user breakpoints operate more predictably without
                   code duplication, and
               (2) we're not running any of the global optimizers
                   that would explode in time/space with the highly
                   connected CFG created by the indirect branching.  */

            rtx finally_label = gen_label_rtx ();
            rtx done_label = gen_label_rtx ();
            rtx return_link = gen_reg_rtx (Pmode);
            tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
                                  (tree) finally_label, (tree) return_link);
            TREE_SIDE_EFFECTS (cleanup) = 1;

            /* Start a new binding layer that will keep track of all cleanup
               actions to be performed.  */
            expand_start_bindings (2);
            target_temp_slot_level = temp_slot_level;

            expand_decl_cleanup (NULL_TREE, cleanup);
            op0 = expand_expr (try_block, target, tmode, modifier);

            preserve_temp_slots (op0);
            expand_end_bindings (NULL_TREE, 0, 0);
            emit_jump (done_label);
            emit_label (finally_label);
            expand_expr (finally_block, const0_rtx, VOIDmode, 0);
            emit_indirect_jump (return_link);
            emit_label (done_label);
          }
        else
          {
            expand_start_bindings (2);
            target_temp_slot_level = temp_slot_level;

            expand_decl_cleanup (NULL_TREE, finally_block);
            op0 = expand_expr (try_block, target, tmode, modifier);

            preserve_temp_slots (op0);
            expand_end_bindings (NULL_TREE, 0, 0);
          }

        return op0;
      }

    case GOTO_SUBROUTINE_EXPR:
      {
        rtx subr = (rtx) TREE_OPERAND (exp, 0);
        rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
        rtx return_address = gen_label_rtx ();

        emit_move_insn (return_link,
                        gen_rtx_LABEL_REF (Pmode, return_address));
        emit_jump (subr);
        emit_label (return_address);
        return const0_rtx;
      }
    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
         initialization constants, and should not be expanded.  */
      abort ();

    case SWITCH_EXPR:
      expand_start_case (0, SWITCH_COND (exp), integer_type_node,
                         "switch statement");
      if (SWITCH_BODY (exp))
        expand_expr_stmt (SWITCH_BODY (exp));
      if (SWITCH_LABELS (exp))
        {
          tree duplicate = 0;
          tree vec = SWITCH_LABELS (exp);
          size_t i, n = TREE_VEC_LENGTH (vec);

          for (i = 0; i < n; ++i)
            {
              tree elt = TREE_VEC_ELT (vec, i);
              tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
              tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
              tree max_value = TYPE_MAX_VALUE (controlling_expr_type);

              tree case_low = CASE_LOW (elt);
              tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
              if (case_low && case_high)
                {
                  /* Case label is less than minimum for type.  */
                  if ((tree_int_cst_compare (case_low, min_value) < 0)
                      && (tree_int_cst_compare (case_high, min_value) < 0))
                    {
                      warning ("case label value %d is less than minimum value for type",
                               TREE_INT_CST (case_low));
                      continue;
                    }

                  /* Case value is greater than maximum for type.  */
                  if ((tree_int_cst_compare (case_low, max_value) > 0)
                      && (tree_int_cst_compare (case_high, max_value) > 0))
                    {
                      warning ("case label value %d exceeds maximum value for type",
                               TREE_INT_CST (case_high));
                      continue;
                    }

                  /* Saturate lower case label value to minimum.  */
                  if ((tree_int_cst_compare (case_high, min_value) >= 0)
                      && (tree_int_cst_compare (case_low, min_value) < 0))
                    {
                      warning ("lower value %d in case label range less than minimum value for type",
                               TREE_INT_CST (case_low));
                      case_low = min_value;
                    }

                  /* Saturate upper case label value to maximum.  */
                  if ((tree_int_cst_compare (case_low, max_value) <= 0)
                      && (tree_int_cst_compare (case_high, max_value) > 0))
                    {
                      warning ("upper value %d in case label range exceeds maximum value for type",
                               TREE_INT_CST (case_high));
                      case_high = max_value;
                    }
                }

              add_case_node (case_low, case_high, CASE_LABEL (elt),
                             &duplicate, true);
            }
        }
      expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case CASE_LABEL_EXPR:
      {
        tree duplicate = 0;
        add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
                       &duplicate, false);
        if (duplicate)
          abort ();
        return const0_rtx;
      }

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
                                     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   subtarget, &op0, &op1, 0);
 binop2:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
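/* Illustrative example (editorial): an object placed at
   `addr + ((-addr) & (ALIGN - 1))' yields an offset tree that is a
   BIT_AND_EXPR of a NEGATE_EXPR of the object's address with ALIGN - 1,
   which is exactly the shape matched above.  */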
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
      return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg1);
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg0);
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
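/* Illustrative example (editorial): for `"hello" + i' this returns the
   STRING_CST for "hello" and sets *PTR_OFFSET to `i'; for `&"hello"[2]'
   it returns the STRING_CST with an offset of 2.  */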
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (tree exp, int post, int ignore)
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is know not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
        SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
        bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
      if (post)
        op0 = copy_to_reg (op0);
      else
        bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */

  icode = (int) this_optab->handlers[(int) mode].insn_code;
  if (icode != (int) CODE_FOR_nothing
      /* Make sure that OP0 is valid for operands 0 and 1
         of the insn we want to queue.  */
      && (*insn_data[icode].operand[0].predicate) (op0, mode)
      && (*insn_data[icode].operand[1].predicate) (op0, mode)
      && (*insn_data[icode].operand[2].predicate) (op1, mode))
    single_insn = 1;

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
        {
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);
        }

      temp = expand_assignment (incremented, newexp, ! post && ! ignore);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queuing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0				/* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_data[icode].operand[0].predicate) (op0, mode)
          && (*insn_data[icode].operand[1].predicate) (op0, mode))
        {
          if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
      if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
        {
          rtx addr = (general_operand (XEXP (op0, 0), mode)
                      ? force_reg (Pmode, XEXP (op0, 0))
                      : copy_to_reg (XEXP (op0, 0)));
          rtx temp, result;

          op0 = replace_equiv_address (op0, addr);
          temp = force_reg (GET_MODE (op0), op0);
          if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
            op1 = force_reg (mode, op1);

          /* The increment queue is LIFO, thus we have to `queue'
             the instructions in reverse order.  */
          enqueue_insn (op0, gen_move_insn (op0, temp));
          result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
          return result;
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
                      TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
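/* Illustrative example (editorial): for `a[i++]' the add insn for `i' is
   enqueued rather than emitted immediately, so the old value of `i' can
   index the array without first being copied to another register.  */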
9608 /* Generate code to calculate EXP using a store-flag instruction
9609 and return an rtx for the result. EXP is either a comparison
9610 or a TRUTH_NOT_EXPR whose operand is a comparison.
9612 If TARGET is nonzero, store the result there if convenient.
9614 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9617 Return zero if there is no suitable set-flag instruction
9618 available on this machine.
9620 Once expand_expr has been called on the arguments of the comparison,
9621 we are committed to doing the store flag, since it is not safe to
9622 re-evaluate the expression. We emit the store-flag insn by calling
9623 emit_store_flag, but only expand the arguments if we have a reason
9624 to believe that emit_store_flag will be successful. If we think that
9625 it will, but it isn't, we have to simulate the store-flag with a
9626 set/jump/set sequence. */
9629 do_store_flag (tree exp
, rtx target
, enum machine_mode mode
, int only_cheap
)
9632 tree arg0
, arg1
, type
;
9634 enum machine_mode operand_mode
;
9638 enum insn_code icode
;
9639 rtx subtarget
= target
;
9642 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9643 result at the end. We can't simply invert the test since it would
9644 have already been inverted if it were valid. This case occurs for
9645 some floating-point comparisons. */
9647 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
9648 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
9650 arg0
= TREE_OPERAND (exp
, 0);
9651 arg1
= TREE_OPERAND (exp
, 1);
9653 /* Don't crash if the comparison was erroneous. */
9654 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
9657 type
= TREE_TYPE (arg0
);
9658 operand_mode
= TYPE_MODE (type
);
9659 unsignedp
= TYPE_UNSIGNED (type
);
9661 /* We won't bother with BLKmode store-flag operations because it would mean
9662 passing a lot of information to emit_store_flag. */
9663 if (operand_mode
== BLKmode
)
9666 /* We won't bother with store-flag operations involving function pointers
9667 when function pointers must be canonicalized before comparisons. */
9668 #ifdef HAVE_canonicalize_funcptr_for_compare
9669 if (HAVE_canonicalize_funcptr_for_compare
9670 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9671 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9673 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9674 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9675 == FUNCTION_TYPE
))))
9682 /* Get the rtx comparison code to use. We know that EXP is a comparison
9683 operation of some type. Some comparisons against 1 and -1 can be
9684 converted to comparisons with zero. Do so here so that the tests
9685 below will be aware that we have a comparison with zero. These
9686 tests will not catch constants in the first operand, but constants
9687 are rarely passed as the first operand. */
9689 switch (TREE_CODE (exp
))
9698 if (integer_onep (arg1
))
9699 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
9701 code
= unsignedp
? LTU
: LT
;
9704 if (! unsignedp
&& integer_all_onesp (arg1
))
9705 arg1
= integer_zero_node
, code
= LT
;
9707 code
= unsignedp
? LEU
: LE
;
9710 if (! unsignedp
&& integer_all_onesp (arg1
))
9711 arg1
= integer_zero_node
, code
= GE
;
9713 code
= unsignedp
? GTU
: GT
;
9716 if (integer_onep (arg1
))
9717 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
9719 code
= unsignedp
? GEU
: GE
;
9722 case UNORDERED_EXPR
:
9751 /* Put a constant second. */
9752 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
9754 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
9755 code
= swap_condition (code
);
9758 /* If this is an equality or inequality test of a single bit, we can
9759 do this by shifting the bit being tested to the low-order bit and
9760 masking the result with the constant 1. If the condition was EQ,
9761 we xor it with 1. This does not require an scc insn and is faster
9762 than an scc insn even if we have it.
9764 The code to make this transformation was moved into fold_single_bit_test,
9765 so we just call into the folder and expand its result. */
9767 if ((code
== NE
|| code
== EQ
)
9768 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
9769 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
9771 tree type
= lang_hooks
.types
.type_for_mode (mode
, unsignedp
);
9772 return expand_expr (fold_single_bit_test (code
== NE
? NE_EXPR
: EQ_EXPR
,
9774 target
, VOIDmode
, EXPAND_NORMAL
);
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is
     safe because, if emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
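/* The set/compare/jump/set fallback above emits the equivalent of this
   illustrative sketch (with the two constants exchanged when INVERT is
   set):

       target = 1;
       if (op0 <code> op1)
         goto label;
       target = 0;
     label:

   so TARGET holds the truth value of the comparison on both paths.  */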
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
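/* Because of the #ifndef above, a target that finds dispatch tables
   unusually cheap or expensive can predefine the macro in its target
   headers, e.g. (hypothetical value):

       #define CASE_VALUES_THRESHOLD 8

   which would require at least eight case labels before a dispatch
   table is considered.  */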
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (lang_hooks.types.type_for_size
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
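/* Conceptually, the casesi insn emitted above performs this
   illustrative sketch (OP1 is the lower bound, OP2 the range):

       if ((unsigned) (index - op1) > op2)
         goto default_label;
       goto *table_label[index - op1];

   i.e. the bounds check and the dispatch are both done by the single
   pattern.  */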
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
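  /* For example (values illustrative only): for case labels 3 ... 10,
     the lower bound 3 has already been subtracted, so with X the
     original value the single test

         (unsigned) (X - 3) > 7

     rejects both X < 3 (the subtraction wraps around to a huge
     unsigned value) and X > 10.  */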
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
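/* Altogether, do_tablejump emits the equivalent of this illustrative
   sketch (assuming one CASE_VECTOR_MODE entry per label):

       if ((unsigned) index > range)
         goto default_label;
       temp = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));
       goto *temp;

   In the PIC or PC-relative case no barrier is emitted afterwards, so
   the table can stay adjacent to the jump.  */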
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI
     but do have V2DI, though that case is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
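/* For example (illustrative): a target without a V2DImode move pattern
   still reports V2DImode as valid here when mov_optab has a DImode
   handler, since a V2DI move can then be open-coded as two DImode
   moves.  */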
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
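/* For instance (illustrative): a three-element V4SImode VECTOR_CST
   {1, 2, 3} becomes (const_vector:V4SI [1 2 3 0]), the fourth element
   being zero-filled by the loop above.  */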
#include "gt-expr.h"