/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void execute_expand (void);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
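
/* Editorial illustration (not part of the original file): these predicates
   are how callers pick a strategy once the size is a compile-time constant.
   The sketch below is a simplified form of the dispatch done by
   emit_block_move later in this file; X, Y, SIZE and ALIGN are
   hypothetical.  */
#if 0
  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);	/* inline scalar moves */
  else
    retval = emit_block_move_via_libcall (x, y, size);	/* fall back to memcpy */
#endif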
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	       && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;

      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */
void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */
void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
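
/* Editorial illustration (not part of the original file): the usual life
   cycle of a queued increment.  VAR and BODY are hypothetical rtx values;
   protect_from_queue and emit_queue are defined later in this file.  */
#if 0
  rtx q = enqueue_insn (var, body);	/* remember "increment VAR" for later */
  rtx old = protect_from_queue (q, 0);	/* VAR's pre-increment value, usable in insns */
  /* ... use OLD inside an instruction ... */
  emit_queue ();			/* now emit the queued increment itself */
#endif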
351 /* Use protect_from_queue to convert a QUEUED expression
352 into something that you can put immediately into an instruction.
353 If the queued incrementation has not happened yet,
354 protect_from_queue returns the variable itself.
355 If the incrementation has happened, protect_from_queue returns a temp
356 that contains a copy of the old value of the variable.
358 Any time an rtx which might possibly be a QUEUED is to be put
359 into an instruction, it must be passed through protect_from_queue first.
360 QUEUED expressions are not meaningful in instructions.
362 Do not pass a value through protect_from_queue and then hold
363 on to it for a while before putting it in an instruction!
364 If the queue is flushed in between, incorrect code will result. */
367 protect_from_queue (rtx x
, int modify
)
369 RTX_CODE code
= GET_CODE (x
);
371 #if 0 /* A QUEUED can hang around after the queue is forced out. */
372 /* Shortcut for most common case. */
373 if (pending_chain
== 0)
379 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
380 use of autoincrement. Make a copy of the contents of the memory
381 location rather than a copy of the address, but not if the value is
382 of mode BLKmode. Don't modify X in place since it might be
384 if (code
== MEM
&& GET_MODE (x
) != BLKmode
385 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
388 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
392 rtx temp
= gen_reg_rtx (GET_MODE (x
));
394 emit_insn_before (gen_move_insn (temp
, new),
399 /* Copy the address into a pseudo, so that the returned value
400 remains correct across calls to emit_queue. */
401 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
404 /* Otherwise, recursively protect the subexpressions of all
405 the kinds of rtx's that can contain a QUEUED. */
408 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
409 if (tem
!= XEXP (x
, 0))
415 else if (code
== PLUS
|| code
== MULT
)
417 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
418 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
419 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
428 /* If the increment has not happened, use the variable itself. Copy it
429 into a new pseudo so that the value remains correct across calls to
431 if (QUEUED_INSN (x
) == 0)
432 return copy_to_reg (QUEUED_VAR (x
));
433 /* If the increment has happened and a pre-increment copy exists,
435 if (QUEUED_COPY (x
) != 0)
436 return QUEUED_COPY (x
);
437 /* The increment has happened but we haven't set up a pre-increment copy.
438 Set one up now, and use it. */
439 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
440 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
442 return QUEUED_COPY (x
);
445 /* Return nonzero if X contains a QUEUED expression:
446 if it contains anything that will be altered by a queued increment.
447 We handle only combinations of MEM, PLUS, MINUS and MULT operators
448 since memory addresses generally contain only those. */
451 queued_subexp_p (rtx x
)
453 enum rtx_code code
= GET_CODE (x
);
459 return queued_subexp_p (XEXP (x
, 0));
463 return (queued_subexp_p (XEXP (x
, 0))
464 || queued_subexp_p (XEXP (x
, 1)));
470 /* Retrieve a mark on the queue. */
475 return pending_chain
;
478 /* Perform all the pending incrementations that have been enqueued
479 after MARK was retrieved. If MARK is null, perform all the
480 pending incrementations. */
483 emit_insns_enqueued_after_mark (rtx mark
)
487 /* The marked incrementation may have been emitted in the meantime
488 through a call to emit_queue. In this case, the mark is not valid
489 anymore so do nothing. */
490 if (mark
&& ! QUEUED_BODY (mark
))
493 while ((p
= pending_chain
) != mark
)
495 rtx body
= QUEUED_BODY (p
);
497 switch (GET_CODE (body
))
505 QUEUED_INSN (p
) = body
;
509 #ifdef ENABLE_CHECKING
516 QUEUED_INSN (p
) = emit_insn (body
);
521 pending_chain
= QUEUED_NEXT (p
);
525 /* Perform all the pending incrementations. */
530 emit_insns_enqueued_after_mark (NULL_RTX
);
533 /* Copy data from FROM to TO, where the machine modes are not the same.
534 Both modes may be integer, or both may be floating.
535 UNSIGNEDP should be nonzero if FROM is an unsigned type.
536 This causes zero-extension instead of sign-extension. */
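/* Editorial example (not part of the original file): a typical use is
   widening a QImode value FROM into a fresh SImode pseudo; FROM here is a
   hypothetical rtx.

     rtx to = gen_reg_rtx (SImode);
     convert_move (to, from, 1);     -- UNSIGNEDP == 1, so FROM is zero-extended

   gen_reg_rtx is the standard way to obtain the destination pseudo.  */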
539 convert_move (rtx to
, rtx from
, int unsignedp
)
541 enum machine_mode to_mode
= GET_MODE (to
);
542 enum machine_mode from_mode
= GET_MODE (from
);
543 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
544 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
548 /* rtx code for making an equivalent value. */
549 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
550 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
552 to
= protect_from_queue (to
, 1);
553 from
= protect_from_queue (from
, 0);
555 if (to_real
!= from_real
)
558 /* If the source and destination are already the same, then there's
563 /* If FROM is a SUBREG that indicates that we have already done at least
564 the required extension, strip it. We don't handle such SUBREGs as
567 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
568 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
569 >= GET_MODE_SIZE (to_mode
))
570 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
571 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
573 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
576 if (to_mode
== from_mode
577 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
579 emit_move_insn (to
, from
);
583 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
585 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
588 if (VECTOR_MODE_P (to_mode
))
589 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
591 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
593 emit_move_insn (to
, from
);
597 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
599 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
600 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
609 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
611 else if (GET_MODE_PRECISION (from_mode
) > GET_MODE_PRECISION (to_mode
))
616 /* Try converting directly if the insn is supported. */
618 code
= tab
->handlers
[to_mode
][from_mode
].insn_code
;
619 if (code
!= CODE_FOR_nothing
)
621 emit_unop_insn (code
, to
, from
,
622 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
626 /* Otherwise use a libcall. */
627 libcall
= tab
->handlers
[to_mode
][from_mode
].libfunc
;
630 /* This conversion is not implemented yet. */
634 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
636 insns
= get_insns ();
638 emit_libcall_block (insns
, to
, value
,
639 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
641 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
645 /* Handle pointer conversion. */ /* SPEE 900220. */
646 /* Targets are expected to provide conversion insns between PxImode and
647 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
648 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
650 enum machine_mode full_mode
651 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
653 if (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
657 if (full_mode
!= from_mode
)
658 from
= convert_to_mode (full_mode
, from
, unsignedp
);
659 emit_unop_insn (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
,
663 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
665 enum machine_mode full_mode
666 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
668 if (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
672 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
674 if (to_mode
== full_mode
)
677 /* else proceed to integer conversions below. */
678 from_mode
= full_mode
;
681 /* Now both modes are integers. */
683 /* Handle expanding beyond a word. */
684 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
685 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
692 enum machine_mode lowpart_mode
;
693 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
695 /* Try converting directly if the insn is supported. */
696 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
699 /* If FROM is a SUBREG, put it into a register. Do this
700 so that we always generate the same set of insns for
701 better cse'ing; if an intermediate assignment occurred,
702 we won't be doing the operation directly on the SUBREG. */
703 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
704 from
= force_reg (from_mode
, from
);
705 emit_unop_insn (code
, to
, from
, equiv_code
);
708 /* Next, try converting via full word. */
709 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
710 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
711 != CODE_FOR_nothing
))
715 if (reg_overlap_mentioned_p (to
, from
))
716 from
= force_reg (from_mode
, from
);
717 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
719 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
720 emit_unop_insn (code
, to
,
721 gen_lowpart (word_mode
, to
), equiv_code
);
725 /* No special multiword conversion insn; do it by hand. */
728 /* Since we will turn this into a no conflict block, we must ensure
729 that the source does not overlap the target. */
731 if (reg_overlap_mentioned_p (to
, from
))
732 from
= force_reg (from_mode
, from
);
734 /* Get a copy of FROM widened to a word, if necessary. */
735 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
736 lowpart_mode
= word_mode
;
738 lowpart_mode
= from_mode
;
740 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
742 lowpart
= gen_lowpart (lowpart_mode
, to
);
743 emit_move_insn (lowpart
, lowfrom
);
745 /* Compute the value to put in each remaining word. */
747 fill_value
= const0_rtx
;
752 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
753 && STORE_FLAG_VALUE
== -1)
755 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
757 fill_value
= gen_reg_rtx (word_mode
);
758 emit_insn (gen_slt (fill_value
));
764 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
765 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
767 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
771 /* Fill the remaining words. */
772 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
774 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
775 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
780 if (fill_value
!= subword
)
781 emit_move_insn (subword
, fill_value
);
784 insns
= get_insns ();
787 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
788 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
792 /* Truncating multi-word to a word or less. */
793 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
794 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
796 if (!((GET_CODE (from
) == MEM
797 && ! MEM_VOLATILE_P (from
)
798 && direct_load
[(int) to_mode
]
799 && ! mode_dependent_address_p (XEXP (from
, 0)))
801 || GET_CODE (from
) == SUBREG
))
802 from
= force_reg (from_mode
, from
);
803 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
807 /* Now follow all the conversions between integers
808 no more than a word long. */
810 /* For truncation, usually we can just refer to FROM in a narrower mode. */
811 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
812 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
813 GET_MODE_BITSIZE (from_mode
)))
815 if (!((GET_CODE (from
) == MEM
816 && ! MEM_VOLATILE_P (from
)
817 && direct_load
[(int) to_mode
]
818 && ! mode_dependent_address_p (XEXP (from
, 0)))
820 || GET_CODE (from
) == SUBREG
))
821 from
= force_reg (from_mode
, from
);
822 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
823 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
824 from
= copy_to_reg (from
);
825 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
829 /* Handle extension. */
830 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
832 /* Convert directly if that works. */
833 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
837 from
= force_not_mem (from
);
839 emit_unop_insn (code
, to
, from
, equiv_code
);
844 enum machine_mode intermediate
;
848 /* Search for a mode to convert via. */
849 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
850 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
851 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
853 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
854 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
855 GET_MODE_BITSIZE (intermediate
))))
856 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
857 != CODE_FOR_nothing
))
859 convert_move (to
, convert_to_mode (intermediate
, from
,
860 unsignedp
), unsignedp
);
864 /* No suitable intermediate mode.
865 Generate what we need with shifts. */
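/* Editorial example (not part of the original file): extending a QImode
   value to SImode this way uses a shift amount of 32 - 8 = 24 bits; the
   value is shifted left by 24 and then back right by 24 (logically or
   arithmetically, as UNSIGNEDP requires), leaving the extended result.  */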
866 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
867 - GET_MODE_BITSIZE (from_mode
), 0);
868 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
869 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
871 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
874 emit_move_insn (to
, tmp
);
879 /* Support special truncate insns for certain modes. */
880 if (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
!= CODE_FOR_nothing
)
882 emit_unop_insn (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
,
887 /* Handle truncation of volatile memrefs, and so on;
888 the things that couldn't be truncated directly,
889 and for which there was no special instruction.
891 ??? Code above formerly short-circuited this, for most integer
892 mode pairs, with a force_reg in from_mode followed by a recursive
893 call to this routine. Appears always to have been wrong. */
894 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
896 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
897 emit_move_insn (to
, temp
);
901 /* Mode combination is not recognized. */
905 /* Return an rtx for a value that would result
906 from converting X to mode MODE.
907 Both X and MODE may be floating, or both integer.
908 UNSIGNEDP is nonzero if X is an unsigned value.
909 This can be done by referring to a part of X in place
910 or by copying to a new temporary with conversion.
912 This function *must not* call protect_from_queue
913 except when putting X into an insn (in which case convert_move does it). */
916 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
918 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
921 /* Return an rtx for a value that would result
922 from converting X from mode OLDMODE to mode MODE.
923 Both modes may be floating, or both integer.
924 UNSIGNEDP is nonzero if X is an unsigned value.
926 This can be done by referring to a part of X in place
927 or by copying to a new temporary with conversion.
929 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
931 This function *must not* call protect_from_queue
932 except when putting X into an insn (in which case convert_move does it). */
935 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
939 /* If FROM is a SUBREG that indicates that we have already done at least
940 the required extension, strip it. */
942 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
943 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
944 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
945 x
= gen_lowpart (mode
, x
);
947 if (GET_MODE (x
) != VOIDmode
)
948 oldmode
= GET_MODE (x
);
/* There is one case that we must handle specially: If we are converting
   a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
   we are to interpret the constant as unsigned, gen_lowpart will do
   the wrong thing if the constant appears negative.  What we want to do is
   make the high-order word of the constant zero, not all ones.  */
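/* Editorial example (not part of the original file): on a host whose
   HOST_WIDE_INT is 32 bits, converting (const_int -1) from QImode to an
   unsigned 64-bit mode masks VAL down to 0xff, so immed_double_const
   builds the value 255 with a zero high word instead of the all-ones
   pattern gen_lowpart would produce.  */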
959 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
960 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
961 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
963 HOST_WIDE_INT val
= INTVAL (x
);
965 if (oldmode
!= VOIDmode
966 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
968 int width
= GET_MODE_BITSIZE (oldmode
);
970 /* We need to zero extend VAL. */
971 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
974 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
977 /* We can do this with a gen_lowpart if both desired and current modes
978 are integer, and this is either a constant integer, a register, or a
979 non-volatile MEM. Except for the constant case where MODE is no
980 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
982 if ((GET_CODE (x
) == CONST_INT
983 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
984 || (GET_MODE_CLASS (mode
) == MODE_INT
985 && GET_MODE_CLASS (oldmode
) == MODE_INT
986 && (GET_CODE (x
) == CONST_DOUBLE
987 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
988 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
989 && direct_load
[(int) mode
])
991 && (! HARD_REGISTER_P (x
)
992 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
993 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
994 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
996 /* ?? If we don't know OLDMODE, we have to assume here that
997 X does not need sign- or zero-extension. This may not be
998 the case, but it's the best we can do. */
999 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1000 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1002 HOST_WIDE_INT val
= INTVAL (x
);
1003 int width
= GET_MODE_BITSIZE (oldmode
);
1005 /* We must sign or zero-extend in this case. Start by
1006 zero-extending, then sign extend if we need to. */
1007 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1009 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1010 val
|= (HOST_WIDE_INT
) (-1) << width
;
1012 return gen_int_mode (val
, mode
);
1015 return gen_lowpart (mode
, x
);
/* Converting from an integer constant into MODE is always equivalent to a
   subreg operation.  */
1020 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
1022 if (GET_MODE_BITSIZE (mode
) != GET_MODE_BITSIZE (oldmode
))
1024 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
1027 temp
= gen_reg_rtx (mode
);
1028 convert_move (temp
, x
, unsignedp
);
1032 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1033 store efficiently. Due to internal GCC limitations, this is
1034 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1035 for an immediate constant. */
1037 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
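
/* Editorial note (not part of the original file): for example, with
   MOVE_MAX_PIECES equal to 8 and a 64-bit HOST_WIDE_INT, the definition
   above evaluates to MIN (8, 16), i.e. 8 bytes per constant piece.  */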
1039 /* Determine whether the LEN bytes can be moved by using several move
1040 instructions. Return nonzero if a call to move_by_pieces should
1044 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
1045 unsigned int align ATTRIBUTE_UNUSED
)
1047 return MOVE_BY_PIECES_P (len
, align
);
1050 /* Generate several move instructions to copy LEN bytes from block FROM to
1051 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1052 and TO through protect_from_queue before calling.
1054 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1055 used to push FROM to the stack.
1057 ALIGN is maximum stack alignment we can assume.
1059 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */
1064 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1065 unsigned int align
, int endp
)
1067 struct move_by_pieces data
;
1068 rtx to_addr
, from_addr
= XEXP (from
, 0);
1069 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1070 enum machine_mode mode
= VOIDmode
, tmode
;
1071 enum insn_code icode
;
1073 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
1076 data
.from_addr
= from_addr
;
1079 to_addr
= XEXP (to
, 0);
1082 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1083 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1085 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1092 #ifdef STACK_GROWS_DOWNWARD
1098 data
.to_addr
= to_addr
;
1101 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1102 || GET_CODE (from_addr
) == POST_INC
1103 || GET_CODE (from_addr
) == POST_DEC
);
1105 data
.explicit_inc_from
= 0;
1106 data
.explicit_inc_to
= 0;
1107 if (data
.reverse
) data
.offset
= len
;
1110 /* If copying requires more than two move insns,
1111 copy addresses to registers (to make displacements shorter)
1112 and use post-increment if available. */
1113 if (!(data
.autinc_from
&& data
.autinc_to
)
1114 && move_by_pieces_ninsns (len
, align
) > 2)
1116 /* Find the mode of the largest move... */
1117 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1118 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1119 if (GET_MODE_SIZE (tmode
) < max_size
)
1122 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1124 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1125 data
.autinc_from
= 1;
1126 data
.explicit_inc_from
= -1;
1128 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1130 data
.from_addr
= copy_addr_to_reg (from_addr
);
1131 data
.autinc_from
= 1;
1132 data
.explicit_inc_from
= 1;
1134 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1135 data
.from_addr
= copy_addr_to_reg (from_addr
);
1136 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1138 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1140 data
.explicit_inc_to
= -1;
1142 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1144 data
.to_addr
= copy_addr_to_reg (to_addr
);
1146 data
.explicit_inc_to
= 1;
1148 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1149 data
.to_addr
= copy_addr_to_reg (to_addr
);
1152 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1153 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1154 align
= MOVE_MAX
* BITS_PER_UNIT
;
1156 /* First move what we can in the largest integer mode, then go to
1157 successively smaller modes. */
1159 while (max_size
> 1)
1161 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1162 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1163 if (GET_MODE_SIZE (tmode
) < max_size
)
1166 if (mode
== VOIDmode
)
1169 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1170 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1171 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1173 max_size
= GET_MODE_SIZE (mode
);
1176 /* The code above should have handled everything. */
1190 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
1191 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
1193 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
1196 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1203 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
1211 /* Return number of insns required to move L bytes by pieces.
1212 ALIGN (in bits) is maximum alignment we can assume. */
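/* Editorial example (not part of the original file): with integer modes of
   8, 4, 2 and 1 bytes available and no alignment penalty, L = 13 needs one
   8-byte move (5 bytes left), one 4-byte move (1 byte left) and one 1-byte
   move, so the function below returns 3.  */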
1214 static unsigned HOST_WIDE_INT
1215 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
)
1217 unsigned HOST_WIDE_INT n_insns
= 0;
1218 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1220 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1221 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1222 align
= MOVE_MAX
* BITS_PER_UNIT
;
1224 while (max_size
> 1)
1226 enum machine_mode mode
= VOIDmode
, tmode
;
1227 enum insn_code icode
;
1229 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1230 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1231 if (GET_MODE_SIZE (tmode
) < max_size
)
1234 if (mode
== VOIDmode
)
1237 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1238 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1239 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1241 max_size
= GET_MODE_SIZE (mode
);
1249 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1250 with move instructions for mode MODE. GENFUN is the gen_... function
1251 to make a move insn for that mode. DATA has all the other info. */
1254 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1255 struct move_by_pieces
*data
)
1257 unsigned int size
= GET_MODE_SIZE (mode
);
1258 rtx to1
= NULL_RTX
, from1
;
1260 while (data
->len
>= size
)
1263 data
->offset
-= size
;
1267 if (data
->autinc_to
)
1268 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1271 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1274 if (data
->autinc_from
)
1275 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1278 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1280 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1281 emit_insn (gen_add2_insn (data
->to_addr
,
1282 GEN_INT (-(HOST_WIDE_INT
)size
)));
1283 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1284 emit_insn (gen_add2_insn (data
->from_addr
,
1285 GEN_INT (-(HOST_WIDE_INT
)size
)));
1288 emit_insn ((*genfun
) (to1
, from1
));
1291 #ifdef PUSH_ROUNDING
1292 emit_single_push_insn (mode
, from1
, NULL
);
1298 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1299 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1300 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1301 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1303 if (! data
->reverse
)
1304 data
->offset
+= size
;
1310 /* Emit code to move a block Y to a block X. This may be done with
1311 string-move instructions, with multiple scalar move instructions,
1312 or with a library call.
1314 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1315 SIZE is an rtx that says how long they are.
1316 ALIGN is the maximum alignment we can assume they have.
1317 METHOD describes what kind of copy this is, and what mechanisms may be used.
1319 Return the address of the new block, if memcpy is called and returns it,
1323 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1331 case BLOCK_OP_NORMAL
:
1332 may_use_call
= true;
1335 case BLOCK_OP_CALL_PARM
:
1336 may_use_call
= block_move_libcall_safe_for_call_parm ();
1338 /* Make inhibit_defer_pop nonzero around the library call
1339 to force it to pop the arguments right away. */
1343 case BLOCK_OP_NO_LIBCALL
:
1344 may_use_call
= false;
1351 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1353 if (GET_MODE (x
) != BLKmode
)
1355 if (GET_MODE (y
) != BLKmode
)
1358 x
= protect_from_queue (x
, 1);
1359 y
= protect_from_queue (y
, 0);
1360 size
= protect_from_queue (size
, 0);
1362 if (GET_CODE (x
) != MEM
)
1364 if (GET_CODE (y
) != MEM
)
1369 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1370 can be incorrect is coming from __builtin_memcpy. */
1371 if (GET_CODE (size
) == CONST_INT
)
1373 if (INTVAL (size
) == 0)
1376 x
= shallow_copy_rtx (x
);
1377 y
= shallow_copy_rtx (y
);
1378 set_mem_size (x
, size
);
1379 set_mem_size (y
, size
);
1382 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1383 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1384 else if (emit_block_move_via_movstr (x
, y
, size
, align
))
1386 else if (may_use_call
)
1387 retval
= emit_block_move_via_libcall (x
, y
, size
);
1389 emit_block_move_via_loop (x
, y
, size
, align
);
1391 if (method
== BLOCK_OP_CALL_PARM
)
1397 /* A subroutine of emit_block_move. Returns true if calling the
1398 block move libcall will not clobber any parameters which may have
1399 already been placed on the stack. */
1402 block_move_libcall_safe_for_call_parm (void)
1404 /* If arguments are pushed on the stack, then they're safe. */
1408 /* If registers go on the stack anyway, any argument is sure to clobber
1409 an outgoing argument. */
1410 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1412 tree fn
= emit_block_move_libcall_fn (false);
1414 if (REG_PARM_STACK_SPACE (fn
) != 0)
1419 /* If any argument goes in memory, then it might clobber an outgoing
1422 CUMULATIVE_ARGS args_so_far
;
1425 fn
= emit_block_move_libcall_fn (false);
1426 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1428 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1429 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1431 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1432 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1433 if (!tmp
|| !REG_P (tmp
))
1435 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1436 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1440 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1446 /* A subroutine of emit_block_move. Expand a movstr pattern;
1447 return true if successful. */
1450 emit_block_move_via_movstr (rtx x
, rtx y
, rtx size
, unsigned int align
)
1452 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1453 int save_volatile_ok
= volatile_ok
;
1454 enum machine_mode mode
;
1456 /* Since this is a move insn, we don't care about volatility. */
1459 /* Try the most limited insn first, because there's no point
1460 including more than one in the machine description unless
1461 the more limited one has some advantage. */
1463 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1464 mode
= GET_MODE_WIDER_MODE (mode
))
1466 enum insn_code code
= movstr_optab
[(int) mode
];
1467 insn_operand_predicate_fn pred
;
1469 if (code
!= CODE_FOR_nothing
1470 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1471 here because if SIZE is less than the mode mask, as it is
1472 returned by the macro, it will definitely be less than the
1473 actual mode mask. */
1474 && ((GET_CODE (size
) == CONST_INT
1475 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1476 <= (GET_MODE_MASK (mode
) >> 1)))
1477 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1478 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1479 || (*pred
) (x
, BLKmode
))
1480 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1481 || (*pred
) (y
, BLKmode
))
1482 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1483 || (*pred
) (opalign
, VOIDmode
)))
1486 rtx last
= get_last_insn ();
1489 op2
= convert_to_mode (mode
, size
, 1);
1490 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1491 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1492 op2
= copy_to_mode_reg (mode
, op2
);
1494 /* ??? When called via emit_block_move_for_call, it'd be
1495 nice if there were some way to inform the backend, so
1496 that it doesn't fail the expansion because it thinks
1497 emitting the libcall would be more efficient. */
1499 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1503 volatile_ok
= save_volatile_ok
;
1507 delete_insns_since (last
);
1511 volatile_ok
= save_volatile_ok
;
1515 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1516 Return the return value from memcpy, 0 otherwise. */
1519 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
)
1521 rtx dst_addr
, src_addr
;
1522 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1523 enum machine_mode size_mode
;
1526 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1528 It is unsafe to save the value generated by protect_from_queue and reuse
1529 it later. Consider what happens if emit_queue is called before the
1530 return value from protect_from_queue is used.
1532 Expansion of the CALL_EXPR below will call emit_queue before we are
1533 finished emitting RTL for argument setup. So if we are not careful we
1534 could get the wrong value for an argument.
1536 To avoid this problem we go ahead and emit code to copy the addresses of
1537 DST and SRC and SIZE into new pseudos. We can then place those new
1538 pseudos into an RTL_EXPR and use them later, even after a call to
1541 Note this is not strictly needed for library calls since they do not call
1542 emit_queue before loading their arguments. However, we may need to have
1543 library calls call emit_queue in the future since failing to do so could
1544 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1545 arguments in registers. */
1547 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1548 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1550 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1551 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1553 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1554 src_tree
= make_tree (ptr_type_node
, src_addr
);
1556 if (TARGET_MEM_FUNCTIONS
)
1557 size_mode
= TYPE_MODE (sizetype
);
1559 size_mode
= TYPE_MODE (unsigned_type_node
);
1561 size
= convert_to_mode (size_mode
, size
, 1);
1562 size
= copy_to_mode_reg (size_mode
, size
);
1564 /* It is incorrect to use the libcall calling conventions to call
1565 memcpy in this context. This could be a user call to memcpy and
1566 the user may wish to examine the return value from memcpy. For
1567 targets where libcalls and normal calls have different conventions
1568 for returning pointers, we could end up generating incorrect code.
1570 For convenience, we generate the call to bcopy this way as well. */
1572 if (TARGET_MEM_FUNCTIONS
)
1573 size_tree
= make_tree (sizetype
, size
);
1575 size_tree
= make_tree (unsigned_type_node
, size
);
1577 fn
= emit_block_move_libcall_fn (true);
1578 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1579 if (TARGET_MEM_FUNCTIONS
)
1581 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1582 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1586 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1587 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1590 /* Now we have to build up the CALL_EXPR itself. */
1591 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1592 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1593 call_expr
, arg_list
, NULL_TREE
);
1595 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1597 /* If we are initializing a readonly value, show the above call clobbered
1598 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1599 the delay slot scheduler might overlook conflicts and take nasty
1601 if (RTX_UNCHANGING_P (dst
))
1602 add_function_usage_to
1603 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode
,
1604 gen_rtx_CLOBBER (VOIDmode
, dst
),
1607 return TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
;
1610 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1611 for the function we use for block copies. The first time FOR_CALL
1612 is true, we call assemble_external. */
1614 static GTY(()) tree block_move_fn
;
1617 init_block_move_fn (const char *asmspec
)
1623 if (TARGET_MEM_FUNCTIONS
)
1625 fn
= get_identifier ("memcpy");
1626 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1627 const_ptr_type_node
, sizetype
,
1632 fn
= get_identifier ("bcopy");
1633 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
1634 ptr_type_node
, unsigned_type_node
,
1638 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
1639 DECL_EXTERNAL (fn
) = 1;
1640 TREE_PUBLIC (fn
) = 1;
1641 DECL_ARTIFICIAL (fn
) = 1;
1642 TREE_NOTHROW (fn
) = 1;
1649 SET_DECL_RTL (block_move_fn
, NULL_RTX
);
1650 SET_DECL_ASSEMBLER_NAME (block_move_fn
, get_identifier (asmspec
));
1655 emit_block_move_libcall_fn (int for_call
)
1657 static bool emitted_extern
;
1660 init_block_move_fn (NULL
);
1662 if (for_call
&& !emitted_extern
)
1664 emitted_extern
= true;
1665 make_decl_rtl (block_move_fn
, NULL
);
1666 assemble_external (block_move_fn
);
1669 return block_move_fn
;
1672 /* A subroutine of emit_block_move. Copy the data via an explicit
1673 loop. This is used only when libcalls are forbidden. */
1674 /* ??? It'd be nice to copy in hunks larger than QImode. */
1677 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1678 unsigned int align ATTRIBUTE_UNUSED
)
1680 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1681 enum machine_mode iter_mode
;
1683 iter_mode
= GET_MODE (size
);
1684 if (iter_mode
== VOIDmode
)
1685 iter_mode
= word_mode
;
1687 top_label
= gen_label_rtx ();
1688 cmp_label
= gen_label_rtx ();
1689 iter
= gen_reg_rtx (iter_mode
);
1691 emit_move_insn (iter
, const0_rtx
);
1693 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1694 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1695 do_pending_stack_adjust ();
1697 emit_jump (cmp_label
);
1698 emit_label (top_label
);
1700 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
1701 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
1702 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
1703 x
= change_address (x
, QImode
, x_addr
);
1704 y
= change_address (y
, QImode
, y_addr
);
1706 emit_move_insn (x
, y
);
1708 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1709 true, OPTAB_LIB_WIDEN
);
1711 emit_move_insn (iter
, tmp
);
1713 emit_label (cmp_label
);
1715 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1719 /* Copy all or part of a value X into registers starting at REGNO.
1720 The number of registers to be filled is NREGS. */
1723 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1726 #ifdef HAVE_load_multiple
1734 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1735 x
= validize_mem (force_const_mem (mode
, x
));
1737 /* See if the machine can do this with a load multiple insn. */
1738 #ifdef HAVE_load_multiple
1739 if (HAVE_load_multiple
)
1741 last
= get_last_insn ();
1742 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1750 delete_insns_since (last
);
1754 for (i
= 0; i
< nregs
; i
++)
1755 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1756 operand_subword_force (x
, i
, mode
));
1759 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1760 The number of registers to be filled is NREGS. */
1763 move_block_from_reg (int regno
, rtx x
, int nregs
)
1770 /* See if the machine can do this with a store multiple insn. */
1771 #ifdef HAVE_store_multiple
1772 if (HAVE_store_multiple
)
1774 rtx last
= get_last_insn ();
1775 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1783 delete_insns_since (last
);
1787 for (i
= 0; i
< nregs
; i
++)
1789 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1794 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1798 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1799 ORIG, where ORIG is a non-consecutive group of registers represented by
1800 a PARALLEL. The clone is identical to the original except in that the
1801 original set of registers is replaced by a new set of pseudo registers.
1802 The new set has the same modes as the original set. */
1805 gen_group_rtx (rtx orig
)
1810 if (GET_CODE (orig
) != PARALLEL
)
1813 length
= XVECLEN (orig
, 0);
1814 tmps
= alloca (sizeof (rtx
) * length
);
1816 /* Skip a NULL entry in first slot. */
1817 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1822 for (; i
< length
; i
++)
1824 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1825 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1827 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1830 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1833 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1834 where DST is non-consecutive registers represented by a PARALLEL.
1835 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1839 emit_group_load (rtx dst
, rtx orig_src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1844 if (GET_CODE (dst
) != PARALLEL
)
1847 /* Check for a NULL entry, used to indicate that the parameter goes
1848 both on the stack and in registers. */
1849 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1854 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1856 /* Process the pieces. */
1857 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1859 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1860 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1861 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1864 /* Handle trailing fragments that run over the size of the struct. */
1865 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1867 /* Arrange to shift the fragment to where it belongs.
1868 extract_bit_field loads to the lsb of the reg. */
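/* Editorial example (not part of the original file): for a 5-byte
   structure whose second piece is a 4-byte register at BYTEPOS 4, only
   one byte of that piece is real, so the computation just below yields
   SHIFT = (4 - (5 - 4)) * 8 = 24 bits and trims BYTELEN to 1.  */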
1870 #ifdef BLOCK_REG_PADDING
1871 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1872 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1877 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1878 bytelen
= ssize
- bytepos
;
1883 /* If we won't be loading directly from memory, protect the real source
1884 from strange tricks we might play; but make sure that the source can
1885 be loaded directly into the destination. */
1887 if (GET_CODE (orig_src
) != MEM
1888 && (!CONSTANT_P (orig_src
)
1889 || (GET_MODE (orig_src
) != mode
1890 && GET_MODE (orig_src
) != VOIDmode
)))
1892 if (GET_MODE (orig_src
) == VOIDmode
)
1893 src
= gen_reg_rtx (mode
);
1895 src
= gen_reg_rtx (GET_MODE (orig_src
));
1897 emit_move_insn (src
, orig_src
);
1900 /* Optimize the access just a bit. */
1901 if (GET_CODE (src
) == MEM
1902 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1903 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1904 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1905 && bytelen
== GET_MODE_SIZE (mode
))
1907 tmps
[i
] = gen_reg_rtx (mode
);
1908 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1910 else if (GET_CODE (src
) == CONCAT
)
1912 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1913 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1915 if ((bytepos
== 0 && bytelen
== slen0
)
1916 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1918 /* The following assumes that the concatenated objects all
1919 have the same size. In this case, a simple calculation
1920 can be used to determine the object and the bit field
1922 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1923 if (! CONSTANT_P (tmps
[i
])
1924 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
1925 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1926 (bytepos
% slen0
) * BITS_PER_UNIT
,
1927 1, NULL_RTX
, mode
, mode
, ssize
);
1929 else if (bytepos
== 0)
1931 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1932 emit_move_insn (mem
, src
);
1933 tmps
[i
] = adjust_address (mem
, mode
, 0);
1938 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1939 SIMD register, which is currently broken. While we get GCC
1940 to emit proper RTL for these cases, let's dump to memory. */
1941 else if (VECTOR_MODE_P (GET_MODE (dst
))
1944 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1947 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1948 emit_move_insn (mem
, src
);
1949 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1951 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1952 && XVECLEN (dst
, 0) > 1)
1953 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE(dst
), bytepos
);
1954 else if (CONSTANT_P (src
)
1955 || (REG_P (src
) && GET_MODE (src
) == mode
))
1958 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1959 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1963 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
1964 tmps
[i
], 0, OPTAB_WIDEN
);
1969 /* Copy the extracted pieces into the proper (probable) hard regs. */
1970 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1971 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
1974 /* Emit code to move a block SRC to block DST, where SRC and DST are
1975 non-consecutive groups of registers, each represented by a PARALLEL. */
1978 emit_group_move (rtx dst
, rtx src
)
1982 if (GET_CODE (src
) != PARALLEL
1983 || GET_CODE (dst
) != PARALLEL
1984 || XVECLEN (src
, 0) != XVECLEN (dst
, 0))
1987 /* Skip first entry if NULL. */
1988 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1989 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1990 XEXP (XVECEXP (src
, 0, i
), 0));
1993 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1994 where SRC is non-consecutive registers represented by a PARALLEL.
1995 SSIZE represents the total size of block ORIG_DST, or -1 if not
1999 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
2004 if (GET_CODE (src
) != PARALLEL
)
2007 /* Check for a NULL entry, used to indicate that the parameter goes
2008 both on the stack and in registers. */
2009 if (XEXP (XVECEXP (src
, 0, 0), 0))
2014 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2016 /* Copy the (probable) hard regs into pseudos. */
2017 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2019 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2020 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2021 emit_move_insn (tmps
[i
], reg
);
2025 /* If we won't be storing directly into memory, protect the real destination
2026 from strange tricks we might play. */
2028 if (GET_CODE (dst
) == PARALLEL
)
2032 /* We can get a PARALLEL dst if there is a conditional expression in
2033 a return statement. In that case, the dst and src are the same,
2034 so no action is necessary. */
2035 if (rtx_equal_p (dst
, src
))
2038 /* It is unclear if we can ever reach here, but we may as well handle
2039 it. Allocate a temporary, and split this into a store/load to/from
2042 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2043 emit_group_store (temp
, src
, type
, ssize
);
2044 emit_group_load (dst
, temp
, type
, ssize
);
2047 else if (GET_CODE (dst
) != MEM
&& GET_CODE (dst
) != CONCAT
)
2049 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2050 /* Make life a bit easier for combine. */
2051 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}
      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else if (bytepos == 0 && XVECLEN (src, 0))
	    {
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	  else
	    abort ();
	}
      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
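/* Illustrative sketch (hypothetical values, for exposition only): spilling a
   value that lives in two hard registers into a 16-byte BLKmode stack
   temporary via emit_group_store.  */
#if 0
  rtx grp = gen_rtx_PARALLEL
    (VOIDmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 4), GEN_INT (0)),
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 5), GEN_INT (8))));
  rtx mem = assign_stack_temp (BLKmode, 16, 0);
  emit_group_store (mem, grp, NULL_TREE, 16);
#endif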
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? ! BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);
    }

  return tgtblk;
}
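/* Illustrative sketch (hypothetical register and type, for exposition only):
   unpacking a small structure returned in a register.  Passing a null TGTBLK
   makes the routine allocate and return a stack temporary.  RETTYPE stands
   for the tree type of the call's return value.  */
#if 0
  rtx retreg = gen_rtx_REG (DImode, 0);	/* hypothetical return register  */
  rtx blk = copy_blkmode_from_reg (NULL_RTX, retreg, rettype);
#endif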
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
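/* Illustrative sketch (hypothetical register numbers, for exposition only):
   recording argument registers in the CALL_INSN_FUNCTION_USAGE list that
   accompanies an emitted call.  */
#if 0
  rtx call_fusage = NULL_RTX;
  use_reg (&call_fusage, gen_rtx_REG (SImode, 4));	/* one hard register  */
  use_regs (&call_fusage, 5, 2);			/* regs 5 and 6  */
#endif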
2260 /* Determine whether the LEN bytes generated by CONSTFUN can be
2261 stored to memory using several move instructions. CONSTFUNDATA is
2262 a pointer which will be passed as argument in every CONSTFUN call.
2263 ALIGN is maximum alignment we can assume. Return nonzero if a
2264 call to store_by_pieces should succeed. */
2267 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
2268 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2269 void *constfundata
, unsigned int align
)
2271 unsigned HOST_WIDE_INT max_size
, l
;
2272 HOST_WIDE_INT offset
= 0;
2273 enum machine_mode mode
, tmode
;
2274 enum insn_code icode
;
2281 if (! STORE_BY_PIECES_P (len
, align
))
2284 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2285 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2286 align
= MOVE_MAX
* BITS_PER_UNIT
;
2288 /* We would first store what we can in the largest integer mode, then go to
2289 successively smaller modes. */
2292 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2297 max_size
= STORE_MAX_PIECES
+ 1;
2298 while (max_size
> 1)
2300 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2301 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2302 if (GET_MODE_SIZE (tmode
) < max_size
)
2305 if (mode
== VOIDmode
)
2308 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2309 if (icode
!= CODE_FOR_nothing
2310 && align
>= GET_MODE_ALIGNMENT (mode
))
2312 unsigned int size
= GET_MODE_SIZE (mode
);
2319 cst
= (*constfun
) (constfundata
, offset
, mode
);
2320 if (!LEGITIMATE_CONSTANT_P (cst
))
2330 max_size
= GET_MODE_SIZE (mode
);
2333 /* The code above should have handled everything. */
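/* Illustrative sketch (hypothetical callback, for exposition only): a
   CONSTFUN returns the constant piece for a given offset in a given mode;
   can_store_by_pieces then predicts whether store_by_pieces would succeed.  */
#if 0
static rtx
zero_piece (void *data ATTRIBUTE_UNUSED, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
	    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

/* In a caller, with TO a BLKmode MEM:
     if (can_store_by_pieces (16, zero_piece, NULL, BITS_PER_WORD))
       store_by_pieces (to, 16, zero_piece, NULL, BITS_PER_WORD, 0);  */
#endif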
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, int endp)
{
2354 struct store_by_pieces data
;
2363 if (! STORE_BY_PIECES_P (len
, align
))
2365 to
= protect_from_queue (to
, 1);
2366 data
.constfun
= constfun
;
2367 data
.constfundata
= constfundata
;
2370 store_by_pieces_1 (&data
, align
);
2381 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2382 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2384 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
2387 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2394 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
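/* Illustrative sketch (hypothetical placeholders TO, LEN, CONSTFUN, DATA and
   ALIGN, for exposition only): with ENDP == 1 the returned MEM addresses the
   byte just past the stored block (the mempcpy convention); ENDP == 2 backs
   up one byte (the stpcpy convention).  */
#if 0
  rtx end = store_by_pieces (to, len, constfun, data, align, 1);
#endif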
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
2432 /* Subroutine of clear_by_pieces and store_by_pieces.
2433 Generate several move instructions to store LEN bytes of block TO. (A MEM
2434 rtx with BLKmode). The caller must pass TO through protect_from_queue
2435 before calling. ALIGN is maximum alignment we can assume. */
2438 store_by_pieces_1 (struct store_by_pieces
*data ATTRIBUTE_UNUSED
,
2439 unsigned int align ATTRIBUTE_UNUSED
)
2441 rtx to_addr
= XEXP (data
->to
, 0);
2442 unsigned HOST_WIDE_INT max_size
= STORE_MAX_PIECES
+ 1;
2443 enum machine_mode mode
= VOIDmode
, tmode
;
2444 enum insn_code icode
;
2447 data
->to_addr
= to_addr
;
2449 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2450 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2452 data
->explicit_inc_to
= 0;
2454 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2456 data
->offset
= data
->len
;
2458 /* If storing requires more than two move insns,
2459 copy addresses to registers (to make displacements shorter)
2460 and use post-increment if available. */
2461 if (!data
->autinc_to
2462 && move_by_pieces_ninsns (data
->len
, align
) > 2)
2464 /* Determine the main mode we'll be using. */
2465 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2466 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2467 if (GET_MODE_SIZE (tmode
) < max_size
)
2470 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2472 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2473 data
->autinc_to
= 1;
2474 data
->explicit_inc_to
= -1;
2477 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2478 && ! data
->autinc_to
)
2480 data
->to_addr
= copy_addr_to_reg (to_addr
);
2481 data
->autinc_to
= 1;
2482 data
->explicit_inc_to
= 1;
2485 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2486 data
->to_addr
= copy_addr_to_reg (to_addr
);
2489 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2490 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2491 align
= MOVE_MAX
* BITS_PER_UNIT
;
2493 /* First store what we can in the largest integer mode, then go to
2494 successively smaller modes. */
2496 while (max_size
> 1)
2498 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2499 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2500 if (GET_MODE_SIZE (tmode
) < max_size
)
2503 if (mode
== VOIDmode
)
2506 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2507 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2508 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2510 max_size
= GET_MODE_SIZE (mode
);
2513 /* The code above should have handled everything. */
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size)
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
			: GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (size == const0_rtx)
	;
      else if (GET_CODE (size) == CONST_INT
	       && CLEAR_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
	;
      else
	retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
2592 /* A subroutine of clear_storage. Expand a clrstr pattern;
2593 return true if successful. */
2596 clear_storage_via_clrstr (rtx object
, rtx size
, unsigned int align
)
2598 /* Try the most limited insn first, because there's no point
2599 including more than one in the machine description unless
2600 the more limited one has some advantage. */
2602 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2603 enum machine_mode mode
;
2605 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2606 mode
= GET_MODE_WIDER_MODE (mode
))
2608 enum insn_code code
= clrstr_optab
[(int) mode
];
2609 insn_operand_predicate_fn pred
;
2611 if (code
!= CODE_FOR_nothing
2612 /* We don't need MODE to be narrower than
2613 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2614 the mode mask, as it is returned by the macro, it will
2615 definitely be less than the actual mode mask. */
2616 && ((GET_CODE (size
) == CONST_INT
2617 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2618 <= (GET_MODE_MASK (mode
) >> 1)))
2619 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2620 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2621 || (*pred
) (object
, BLKmode
))
2622 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2623 || (*pred
) (opalign
, VOIDmode
)))
2626 rtx last
= get_last_insn ();
2629 op1
= convert_to_mode (mode
, size
, 1);
2630 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2631 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2632 op1
= copy_to_mode_reg (mode
, op1
);
2634 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2641 delete_insns_since (last
);
2648 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2649 Return the return value of memset, 0 otherwise. */
2652 clear_storage_via_libcall (rtx object
, rtx size
)
2654 tree call_expr
, arg_list
, fn
, object_tree
, size_tree
;
2655 enum machine_mode size_mode
;
2658 /* OBJECT or SIZE may have been passed through protect_from_queue.
2660 It is unsafe to save the value generated by protect_from_queue
2661 and reuse it later. Consider what happens if emit_queue is
2662 called before the return value from protect_from_queue is used.
2664 Expansion of the CALL_EXPR below will call emit_queue before
2665 we are finished emitting RTL for argument setup. So if we are
2666 not careful we could get the wrong value for an argument.
2668 To avoid this problem we go ahead and emit code to copy OBJECT
2669 and SIZE into new pseudos. We can then place those new pseudos
2670 into an RTL_EXPR and use them later, even after a call to
2673 Note this is not strictly needed for library calls since they
2674 do not call emit_queue before loading their arguments. However,
2675 we may need to have library calls call emit_queue in the future
2676 since failing to do so could cause problems for targets which
2677 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2679 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2681 if (TARGET_MEM_FUNCTIONS
)
2682 size_mode
= TYPE_MODE (sizetype
);
2684 size_mode
= TYPE_MODE (unsigned_type_node
);
2685 size
= convert_to_mode (size_mode
, size
, 1);
2686 size
= copy_to_mode_reg (size_mode
, size
);
2688 /* It is incorrect to use the libcall calling conventions to call
2689 memset in this context. This could be a user call to memset and
2690 the user may wish to examine the return value from memset. For
2691 targets where libcalls and normal calls have different conventions
2692 for returning pointers, we could end up generating incorrect code.
2694 For convenience, we generate the call to bzero this way as well. */
2696 object_tree
= make_tree (ptr_type_node
, object
);
2697 if (TARGET_MEM_FUNCTIONS
)
2698 size_tree
= make_tree (sizetype
, size
);
2700 size_tree
= make_tree (unsigned_type_node
, size
);
2702 fn
= clear_storage_libcall_fn (true);
2703 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
2704 if (TARGET_MEM_FUNCTIONS
)
2705 arg_list
= tree_cons (NULL_TREE
, integer_zero_node
, arg_list
);
2706 arg_list
= tree_cons (NULL_TREE
, object_tree
, arg_list
);
2708 /* Now we have to build up the CALL_EXPR itself. */
2709 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2710 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2711 call_expr
, arg_list
, NULL_TREE
);
2713 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2715 /* If we are initializing a readonly value, show the above call
2716 clobbered it. Otherwise, a load from it may erroneously be
2717 hoisted from a loop. */
2718 if (RTX_UNCHANGING_P (object
))
2719 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
2721 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
2724 /* A subroutine of clear_storage_via_libcall. Create the tree node
2725 for the function we use for block clears. The first time FOR_CALL
2726 is true, we call assemble_external. */
2728 static GTY(()) tree block_clear_fn
;
2731 init_block_clear_fn (const char *asmspec
)
2733 if (!block_clear_fn
)
2737 if (TARGET_MEM_FUNCTIONS
)
2739 fn
= get_identifier ("memset");
2740 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2741 integer_type_node
, sizetype
,
2746 fn
= get_identifier ("bzero");
2747 args
= build_function_type_list (void_type_node
, ptr_type_node
,
2748 unsigned_type_node
, NULL_TREE
);
2751 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2752 DECL_EXTERNAL (fn
) = 1;
2753 TREE_PUBLIC (fn
) = 1;
2754 DECL_ARTIFICIAL (fn
) = 1;
2755 TREE_NOTHROW (fn
) = 1;
2757 block_clear_fn
= fn
;
2762 SET_DECL_RTL (block_clear_fn
, NULL_RTX
);
2763 SET_DECL_ASSEMBLER_NAME (block_clear_fn
, get_identifier (asmspec
));
2768 clear_storage_libcall_fn (int for_call
)
2770 static bool emitted_extern
;
2772 if (!block_clear_fn
)
2773 init_block_clear_fn (NULL
);
2775 if (for_call
&& !emitted_extern
)
2777 emitted_extern
= true;
2778 make_decl_rtl (block_clear_fn
, NULL
);
2779 assemble_external (block_clear_fn
);
2782 return block_clear_fn
;
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
2799 x
= protect_from_queue (x
, 1);
2800 y
= protect_from_queue (y
, 0);
2802 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2808 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
2809 && (last_insn
= compress_float_constant (x
, y
)))
2814 if (!LEGITIMATE_CONSTANT_P (y
))
2816 y
= force_const_mem (mode
, y
);
2818 /* If the target's cannot_force_const_mem prevented the spill,
2819 assume that the target's move expanders will also take care
2820 of the non-legitimate constant. */
2826 /* If X or Y are memory references, verify that their addresses are valid
2828 if (GET_CODE (x
) == MEM
2829 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2830 && ! push_operand (x
, GET_MODE (x
)))
2832 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2833 x
= validize_mem (x
);
2835 if (GET_CODE (y
) == MEM
2836 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2838 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2839 y
= validize_mem (y
);
2841 if (mode
== BLKmode
)
2844 last_insn
= emit_move_insn_1 (x
, y
);
2846 if (y_cst
&& REG_P (x
)
2847 && (set
= single_set (last_insn
)) != NULL_RTX
2848 && SET_DEST (set
) == x
2849 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
2850 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
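/* Illustrative sketch (hypothetical operands, for exposition only): copying
   a constant into a fresh pseudo; emit_move_insn validates the operands and
   attaches a REG_EQUAL note when the constant had to be spilled.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
#endif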
2855 /* Low level part of emit_move_insn.
2856 Called just like emit_move_insn, but assumes X and Y
2857 are basically valid. */
2860 emit_move_insn_1 (rtx x
, rtx y
)
2862 enum machine_mode mode
= GET_MODE (x
);
2863 enum machine_mode submode
;
2864 enum mode_class
class = GET_MODE_CLASS (mode
);
2866 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2869 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2871 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2873 /* Expand complex moves by moving real part and imag part, if possible. */
2874 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2875 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
2876 && (mov_optab
->handlers
[(int) submode
].insn_code
2877 != CODE_FOR_nothing
))
2879 /* Don't split destination if it is a stack push. */
2880 int stack
= push_operand (x
, GET_MODE (x
));
2882 #ifdef PUSH_ROUNDING
2883 /* In case we output to the stack, but the size is smaller than the
2884 machine can push exactly, we need to use move instructions. */
2886 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
2887 != GET_MODE_SIZE (submode
)))
2890 HOST_WIDE_INT offset1
, offset2
;
2892 /* Do not use anti_adjust_stack, since we don't want to update
2893 stack_pointer_delta. */
2894 temp
= expand_binop (Pmode
,
2895 #ifdef STACK_GROWS_DOWNWARD
2903 (GET_MODE_SIZE (GET_MODE (x
)))),
2904 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2906 if (temp
!= stack_pointer_rtx
)
2907 emit_move_insn (stack_pointer_rtx
, temp
);
2909 #ifdef STACK_GROWS_DOWNWARD
2911 offset2
= GET_MODE_SIZE (submode
);
2913 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2914 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2915 + GET_MODE_SIZE (submode
));
2918 emit_move_insn (change_address (x
, submode
,
2919 gen_rtx_PLUS (Pmode
,
2921 GEN_INT (offset1
))),
2922 gen_realpart (submode
, y
));
2923 emit_move_insn (change_address (x
, submode
,
2924 gen_rtx_PLUS (Pmode
,
2926 GEN_INT (offset2
))),
2927 gen_imagpart (submode
, y
));
2931 /* If this is a stack, push the highpart first, so it
2932 will be in the argument order.
2934 In that case, change_address is used only to convert
2935 the mode, not to change the address. */
2938 /* Note that the real part always precedes the imag part in memory
2939 regardless of machine's endianness. */
2940 #ifdef STACK_GROWS_DOWNWARD
2941 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2942 gen_imagpart (submode
, y
));
2943 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2944 gen_realpart (submode
, y
));
2946 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2947 gen_realpart (submode
, y
));
2948 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2949 gen_imagpart (submode
, y
));
2954 rtx realpart_x
, realpart_y
;
2955 rtx imagpart_x
, imagpart_y
;
2957 /* If this is a complex value with each part being smaller than a
2958 word, the usual calling sequence will likely pack the pieces into
2959 a single register. Unfortunately, SUBREG of hard registers only
2960 deals in terms of words, so we have a problem converting input
2961 arguments to the CONCAT of two registers that is used elsewhere
2962 for complex values. If this is before reload, we can copy it into
2963 memory and reload. FIXME, we should see about using extract and
2964 insert on integer registers, but complex short and complex char
2965 variables should be rarely used. */
2966 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2967 && (reload_in_progress
| reload_completed
) == 0)
2970 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2972 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2974 if (packed_dest_p
|| packed_src_p
)
2976 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2977 ? MODE_FLOAT
: MODE_INT
);
2979 enum machine_mode reg_mode
2980 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2982 if (reg_mode
!= BLKmode
)
2984 rtx mem
= assign_stack_temp (reg_mode
,
2985 GET_MODE_SIZE (mode
), 0);
2986 rtx cmem
= adjust_address (mem
, mode
, 0);
2990 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2992 emit_move_insn_1 (cmem
, y
);
2993 return emit_move_insn_1 (sreg
, mem
);
2997 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2999 emit_move_insn_1 (mem
, sreg
);
3000 return emit_move_insn_1 (x
, cmem
);
3006 realpart_x
= gen_realpart (submode
, x
);
3007 realpart_y
= gen_realpart (submode
, y
);
3008 imagpart_x
= gen_imagpart (submode
, x
);
3009 imagpart_y
= gen_imagpart (submode
, y
);
3011 /* Show the output dies here. This is necessary for SUBREGs
3012 of pseudos since we cannot track their lifetimes correctly;
3013 hard regs shouldn't appear here except as return values.
3014 We never want to emit such a clobber after reload. */
3016 && ! (reload_in_progress
|| reload_completed
)
3017 && (GET_CODE (realpart_x
) == SUBREG
3018 || GET_CODE (imagpart_x
) == SUBREG
))
3019 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3021 emit_move_insn (realpart_x
, realpart_y
);
3022 emit_move_insn (imagpart_x
, imagpart_y
);
3025 return get_last_insn ();
3028 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3029 find a mode to do it in. If we have a movcc, use it. Otherwise,
3030 find the MODE_INT mode of the same width. */
3031 else if (GET_MODE_CLASS (mode
) == MODE_CC
3032 && mov_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
3034 enum insn_code insn_code
;
3035 enum machine_mode tmode
= VOIDmode
;
3039 && mov_optab
->handlers
[(int) CCmode
].insn_code
!= CODE_FOR_nothing
)
3042 for (tmode
= QImode
; tmode
!= VOIDmode
;
3043 tmode
= GET_MODE_WIDER_MODE (tmode
))
3044 if (GET_MODE_SIZE (tmode
) == GET_MODE_SIZE (mode
))
3047 if (tmode
== VOIDmode
)
3050 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3051 may call change_address which is not appropriate if we were
3052 called when a reload was in progress. We don't have to worry
3053 about changing the address since the size in bytes is supposed to
3054 be the same. Copy the MEM to change the mode and move any
3055 substitutions from the old MEM to the new one. */
3057 if (reload_in_progress
)
3059 x
= gen_lowpart_common (tmode
, x1
);
3060 if (x
== 0 && GET_CODE (x1
) == MEM
)
3062 x
= adjust_address_nv (x1
, tmode
, 0);
3063 copy_replacements (x1
, x
);
3066 y
= gen_lowpart_common (tmode
, y1
);
3067 if (y
== 0 && GET_CODE (y1
) == MEM
)
3069 y
= adjust_address_nv (y1
, tmode
, 0);
3070 copy_replacements (y1
, y
);
3075 x
= gen_lowpart (tmode
, x
);
3076 y
= gen_lowpart (tmode
, y
);
3079 insn_code
= mov_optab
->handlers
[(int) tmode
].insn_code
;
3080 return emit_insn (GEN_FCN (insn_code
) (x
, y
));
3083 /* Try using a move pattern for the corresponding integer mode. This is
3084 only safe when simplify_subreg can convert MODE constants into integer
3085 constants. At present, it can only do this reliably if the value
3086 fits within a HOST_WIDE_INT. */
3087 else if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3088 && (submode
= int_mode_for_mode (mode
)) != BLKmode
3089 && mov_optab
->handlers
[submode
].insn_code
!= CODE_FOR_nothing
)
3090 return emit_insn (GEN_FCN (mov_optab
->handlers
[submode
].insn_code
)
3091 (simplify_gen_subreg (submode
, x
, mode
, 0),
3092 simplify_gen_subreg (submode
, y
, mode
, 0)));
3094 /* This will handle any multi-word or full-word mode that lacks a move_insn
3095 pattern. However, you will get better code if you define such patterns,
3096 even if they must turn into multiple assembler instructions. */
3097 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3104 #ifdef PUSH_ROUNDING
3106 /* If X is a push on the stack, do the push now and replace
3107 X with a reference to the stack pointer. */
3108 if (push_operand (x
, GET_MODE (x
)))
3113 /* Do not use anti_adjust_stack, since we don't want to update
3114 stack_pointer_delta. */
3115 temp
= expand_binop (Pmode
,
3116 #ifdef STACK_GROWS_DOWNWARD
3124 (GET_MODE_SIZE (GET_MODE (x
)))),
3125 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3127 if (temp
!= stack_pointer_rtx
)
3128 emit_move_insn (stack_pointer_rtx
, temp
);
3130 code
= GET_CODE (XEXP (x
, 0));
3132 /* Just hope that small offsets off SP are OK. */
3133 if (code
== POST_INC
)
3134 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3135 GEN_INT (-((HOST_WIDE_INT
)
3136 GET_MODE_SIZE (GET_MODE (x
)))));
3137 else if (code
== POST_DEC
)
3138 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3139 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3141 temp
= stack_pointer_rtx
;
3143 x
= change_address (x
, VOIDmode
, temp
);
3147 /* If we are in reload, see if either operand is a MEM whose address
3148 is scheduled for replacement. */
3149 if (reload_in_progress
&& GET_CODE (x
) == MEM
3150 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3151 x
= replace_equiv_address_nv (x
, inner
);
3152 if (reload_in_progress
&& GET_CODE (y
) == MEM
3153 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3154 y
= replace_equiv_address_nv (y
, inner
);
3160 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3163 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3164 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3166 /* If we can't get a part of Y, put Y into memory if it is a
3167 constant. Otherwise, force it into a register. If we still
3168 can't get a part of Y, abort. */
3169 if (ypart
== 0 && CONSTANT_P (y
))
3171 y
= force_const_mem (mode
, y
);
3172 ypart
= operand_subword (y
, i
, 1, mode
);
3174 else if (ypart
== 0)
3175 ypart
= operand_subword_force (y
, i
, mode
);
3177 if (xpart
== 0 || ypart
== 0)
3180 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3182 last_insn
= emit_move_insn (xpart
, ypart
);
3188 /* Show the output dies here. This is necessary for SUBREGs
3189 of pseudos since we cannot track their lifetimes correctly;
3190 hard regs shouldn't appear here except as return values.
3191 We never want to emit such a clobber after reload. */
3193 && ! (reload_in_progress
|| reload_completed
)
3194 && need_clobber
!= 0)
3195 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3205 /* If Y is representable exactly in a narrower mode, and the target can
3206 perform the extension directly from constant or memory, then emit the
3207 move as an extension. */
3210 compress_float_constant (rtx x
, rtx y
)
3212 enum machine_mode dstmode
= GET_MODE (x
);
3213 enum machine_mode orig_srcmode
= GET_MODE (y
);
3214 enum machine_mode srcmode
;
3217 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3219 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3220 srcmode
!= orig_srcmode
;
3221 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3224 rtx trunc_y
, last_insn
;
3226 /* Skip if the target can't extend this way. */
3227 ic
= can_extend_p (dstmode
, srcmode
, 0);
3228 if (ic
== CODE_FOR_nothing
)
3231 /* Skip if the narrowed value isn't exact. */
3232 if (! exact_real_truncate (srcmode
, &r
))
3235 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3237 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3239 /* Skip if the target needs extra instructions to perform
3241 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
3244 else if (float_extend_from_mem
[dstmode
][srcmode
])
3245 trunc_y
= validize_mem (force_const_mem (srcmode
, trunc_y
));
3249 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3250 last_insn
= get_last_insn ();
3253 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3261 /* Pushing data onto the stack. */
3263 /* Push a block of length SIZE (perhaps variable)
3264 and return an rtx to address the beginning of the block.
3265 Note that it is not possible for the value returned to be a QUEUED.
3266 The value may be virtual_outgoing_args_rtx.
3268 EXTRA is the number of bytes of padding to push in addition to SIZE.
3269 BELOW nonzero means this padding comes at low addresses;
3270 otherwise, the padding comes at high addresses. */
3273 push_block (rtx size
, int extra
, int below
)
3277 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3278 if (CONSTANT_P (size
))
3279 anti_adjust_stack (plus_constant (size
, extra
));
3280 else if (REG_P (size
) && extra
== 0)
3281 anti_adjust_stack (size
);
3284 temp
= copy_to_mode_reg (Pmode
, size
);
3286 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3287 temp
, 0, OPTAB_LIB_WIDEN
);
3288 anti_adjust_stack (temp
);
3291 #ifndef STACK_GROWS_DOWNWARD
3297 temp
= virtual_outgoing_args_rtx
;
3298 if (extra
!= 0 && below
)
3299 temp
= plus_constant (temp
, extra
);
3303 if (GET_CODE (size
) == CONST_INT
)
3304 temp
= plus_constant (virtual_outgoing_args_rtx
,
3305 -INTVAL (size
) - (below
? 0 : extra
));
3306 else if (extra
!= 0 && !below
)
3307 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3308 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3310 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3311 negate_rtx (Pmode
, size
));
3314 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
3317 #ifdef PUSH_ROUNDING
3319 /* Emit single push insn. */
3322 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
3325 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3327 enum insn_code icode
;
3328 insn_operand_predicate_fn pred
;
3330 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3331 /* If there is push pattern, use it. Otherwise try old way of throwing
3332 MEM representing push operation to move expander. */
3333 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3334 if (icode
!= CODE_FOR_nothing
)
3336 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3337 && !((*pred
) (x
, mode
))))
3338 x
= force_reg (mode
, x
);
3339 emit_insn (GEN_FCN (icode
) (x
));
3342 if (GET_MODE_SIZE (mode
) == rounded_size
)
3343 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3344 /* If we are to pad downward, adjust the stack pointer first and
3345 then store X into the stack location using an offset. This is
3346 because emit_move_insn does not know how to pad; it does not have
3348 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
3350 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
3351 HOST_WIDE_INT offset
;
3353 emit_move_insn (stack_pointer_rtx
,
3354 expand_binop (Pmode
,
3355 #ifdef STACK_GROWS_DOWNWARD
3361 GEN_INT (rounded_size
),
3362 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
3364 offset
= (HOST_WIDE_INT
) padding_size
;
3365 #ifdef STACK_GROWS_DOWNWARD
3366 if (STACK_PUSH_CODE
== POST_DEC
)
3367 /* We have already decremented the stack pointer, so get the
3369 offset
+= (HOST_WIDE_INT
) rounded_size
;
3371 if (STACK_PUSH_CODE
== POST_INC
)
3372 /* We have already incremented the stack pointer, so get the
3374 offset
-= (HOST_WIDE_INT
) rounded_size
;
3376 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (offset
));
3380 #ifdef STACK_GROWS_DOWNWARD
3381 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3382 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3383 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3385 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3386 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3387 GEN_INT (rounded_size
));
3389 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3392 dest
= gen_rtx_MEM (mode
, dest_addr
);
3396 set_mem_attributes (dest
, type
, 1);
3398 if (flag_optimize_sibling_calls
)
3399 /* Function incoming arguments may overlap with sibling call
3400 outgoing arguments and we cannot allow reordering of reads
3401 from function arguments with stores to outgoing arguments
3402 of sibling calls. */
3403 set_mem_alias_set (dest
, 0);
3405 emit_move_insn (dest
, x
);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
3442 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3443 unsigned int align
, int partial
, rtx reg
, int extra
,
3444 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3448 enum direction stack_direction
3449 #ifdef STACK_GROWS_DOWNWARD
3455 /* Decide where to pad the argument: `downward' for below,
3456 `upward' for above, or `none' for don't pad it.
3457 Default is below for small data on big-endian machines; else above. */
3458 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3460 /* Invert direction if stack is post-decrement.
3462 if (STACK_PUSH_CODE
== POST_DEC
)
3463 if (where_pad
!= none
)
3464 where_pad
= (where_pad
== downward
? upward
: downward
);
3466 xinner
= x
= protect_from_queue (x
, 0);
3468 if (mode
== BLKmode
)
3470 /* Copy a block into the stack, entirely or partially. */
3473 int used
= partial
* UNITS_PER_WORD
;
3477 if (reg
&& GET_CODE (reg
) == PARALLEL
)
3479 /* Use the size of the elt to compute offset. */
3480 rtx elt
= XEXP (XVECEXP (reg
, 0, 0), 0);
3481 used
= partial
* GET_MODE_SIZE (GET_MODE (elt
));
3482 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3485 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3492 /* USED is now the # of bytes we need not copy to the stack
3493 because registers will take care of them. */
3496 xinner
= adjust_address (xinner
, BLKmode
, used
);
3498 /* If the partial register-part of the arg counts in its stack size,
3499 skip the part of stack space corresponding to the registers.
3500 Otherwise, start copying to the beginning of the stack space,
3501 by setting SKIP to 0. */
3502 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3504 #ifdef PUSH_ROUNDING
3505 /* Do it with several push insns if that doesn't take lots of insns
3506 and if there is no difficulty with push insns that skip bytes
3507 on the stack for alignment purposes. */
3510 && GET_CODE (size
) == CONST_INT
3512 && MEM_ALIGN (xinner
) >= align
3513 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3514 /* Here we avoid the case of a structure whose weak alignment
3515 forces many pushes of a small amount of data,
3516 and such small pushes do rounding that causes trouble. */
3517 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3518 || align
>= BIGGEST_ALIGNMENT
3519 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3520 == (align
/ BITS_PER_UNIT
)))
3521 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3523 /* Push padding now if padding above and stack grows down,
3524 or if padding below and stack grows up.
3525 But if space already allocated, this has already been done. */
3526 if (extra
&& args_addr
== 0
3527 && where_pad
!= none
&& where_pad
!= stack_direction
)
3528 anti_adjust_stack (GEN_INT (extra
));
3530 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3533 #endif /* PUSH_ROUNDING */
3537 /* Otherwise make space on the stack and copy the data
3538 to the address of that space. */
3540 /* Deduct words put into registers from the size we must copy. */
3543 if (GET_CODE (size
) == CONST_INT
)
3544 size
= GEN_INT (INTVAL (size
) - used
);
3546 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3547 GEN_INT (used
), NULL_RTX
, 0,
3551 /* Get the address of the stack space.
3552 In this case, we do not deal with EXTRA separately.
3553 A single stack adjust will do. */
3556 temp
= push_block (size
, extra
, where_pad
== downward
);
3559 else if (GET_CODE (args_so_far
) == CONST_INT
)
3560 temp
= memory_address (BLKmode
,
3561 plus_constant (args_addr
,
3562 skip
+ INTVAL (args_so_far
)));
3564 temp
= memory_address (BLKmode
,
3565 plus_constant (gen_rtx_PLUS (Pmode
,
3570 if (!ACCUMULATE_OUTGOING_ARGS
)
3572 /* If the source is referenced relative to the stack pointer,
3573 copy it to another register to stabilize it. We do not need
3574 to do this if we know that we won't be changing sp. */
3576 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3577 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3578 temp
= copy_to_reg (temp
);
3581 target
= gen_rtx_MEM (BLKmode
, temp
);
3585 set_mem_attributes (target
, type
, 1);
3586 /* Function incoming arguments may overlap with sibling call
3587 outgoing arguments and we cannot allow reordering of reads
3588 from function arguments with stores to outgoing arguments
3589 of sibling calls. */
3590 set_mem_alias_set (target
, 0);
3593 /* ALIGN may well be better aligned than TYPE, e.g. due to
3594 PARM_BOUNDARY. Assume the caller isn't lying. */
3595 set_mem_align (target
, align
);
3597 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3600 else if (partial
> 0)
3602 /* Scalar partly in registers. */
3604 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3607 /* # words of start of argument
3608 that we must make space for but need not store. */
3609 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3610 int args_offset
= INTVAL (args_so_far
);
3613 /* Push padding now if padding above and stack grows down,
3614 or if padding below and stack grows up.
3615 But if space already allocated, this has already been done. */
3616 if (extra
&& args_addr
== 0
3617 && where_pad
!= none
&& where_pad
!= stack_direction
)
3618 anti_adjust_stack (GEN_INT (extra
));
3620 /* If we make space by pushing it, we might as well push
3621 the real data. Otherwise, we can leave OFFSET nonzero
3622 and leave the space uninitialized. */
3626 /* Now NOT_STACK gets the number of words that we don't need to
3627 allocate on the stack. */
3628 not_stack
= partial
- offset
;
3630 /* If the partial register-part of the arg counts in its stack size,
3631 skip the part of stack space corresponding to the registers.
3632 Otherwise, start copying to the beginning of the stack space,
3633 by setting SKIP to 0. */
3634 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3636 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3637 x
= validize_mem (force_const_mem (mode
, x
));
3639 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3640 SUBREGs of such registers are not allowed. */
3641 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
3642 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3643 x
= copy_to_reg (x
);
3645 /* Loop over all the words allocated on the stack for this arg. */
3646 /* We can do it by words, because any scalar bigger than a word
3647 has a size a multiple of a word. */
3648 #ifndef PUSH_ARGS_REVERSED
3649 for (i
= not_stack
; i
< size
; i
++)
3651 for (i
= size
- 1; i
>= not_stack
; i
--)
3653 if (i
>= not_stack
+ offset
)
3654 emit_push_insn (operand_subword_force (x
, i
, mode
),
3655 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3657 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3659 reg_parm_stack_space
, alignment_pad
);
3666 /* Push padding now if padding above and stack grows down,
3667 or if padding below and stack grows up.
3668 But if space already allocated, this has already been done. */
3669 if (extra
&& args_addr
== 0
3670 && where_pad
!= none
&& where_pad
!= stack_direction
)
3671 anti_adjust_stack (GEN_INT (extra
));
3673 #ifdef PUSH_ROUNDING
3674 if (args_addr
== 0 && PUSH_ARGS
)
3675 emit_single_push_insn (mode
, x
, type
);
3679 if (GET_CODE (args_so_far
) == CONST_INT
)
3681 = memory_address (mode
,
3682 plus_constant (args_addr
,
3683 INTVAL (args_so_far
)));
3685 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3687 dest
= gen_rtx_MEM (mode
, addr
);
3690 set_mem_attributes (dest
, type
, 1);
3691 /* Function incoming arguments may overlap with sibling call
3692 outgoing arguments and we cannot allow reordering of reads
3693 from function arguments with stores to outgoing arguments
3694 of sibling calls. */
3695 set_mem_alias_set (dest
, 0);
3698 emit_move_insn (dest
, x
);
3702 /* If part should go in registers, copy that part
3703 into the appropriate registers. Do this now, at the end,
3704 since mem-to-mem copies above may do function calls. */
3705 if (partial
> 0 && reg
!= 0)
3707 /* Handle calls that pass values in multiple non-contiguous locations.
3708 The Irix 6 ABI has examples of this. */
3709 if (GET_CODE (reg
) == PARALLEL
)
3710 emit_group_load (reg
, x
, type
, -1);
3712 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3715 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3716 anti_adjust_stack (GEN_INT (extra
));
3718 if (alignment_pad
&& args_addr
== 0)
3719 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || !REG_P (x)
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}
3741 /* Expand an assignment that stores the value of FROM into TO.
3742 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3743 (This may contain a QUEUED rtx;
3744 if the value is constant, this rtx is a constant.)
3745 Otherwise, the returned value is NULL_RTX. */
3748 expand_assignment (tree to
, tree from
, int want_value
)
3753 /* Don't crash if the lhs of the assignment was erroneous. */
3755 if (TREE_CODE (to
) == ERROR_MARK
)
3757 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3758 return want_value
? result
: NULL_RTX
;
3761 /* Assignment of a structure component needs special treatment
3762 if the structure component's rtx is not simply a MEM.
3763 Assignment of an array element at a constant index, and assignment of
3764 an array element in an unaligned packed structure field, has the same
3767 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3768 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
3769 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
3771 enum machine_mode mode1
;
3772 HOST_WIDE_INT bitsize
, bitpos
;
3780 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3781 &unsignedp
, &volatilep
);
3783 /* If we are going to use store_bit_field and extract_bit_field,
3784 make sure to_rtx will be safe for multiple use. */
3786 if (mode1
== VOIDmode
&& want_value
)
3787 tem
= stabilize_reference (tem
);
3789 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3793 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3795 if (GET_CODE (to_rtx
) != MEM
)
3798 #ifdef POINTERS_EXTEND_UNSIGNED
3799 if (GET_MODE (offset_rtx
) != Pmode
)
3800 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
3802 if (GET_MODE (offset_rtx
) != ptr_mode
)
3803 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3806 /* A constant address in TO_RTX can have VOIDmode, we must not try
3807 to call force_reg for that case. Avoid that case. */
3808 if (GET_CODE (to_rtx
) == MEM
3809 && GET_MODE (to_rtx
) == BLKmode
3810 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3812 && (bitpos
% bitsize
) == 0
3813 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3814 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3816 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3820 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3821 highest_pow2_factor_for_target (to
,
3825 if (GET_CODE (to_rtx
) == MEM
)
3827 /* If the field is at offset zero, we could have been given the
3828 DECL_RTX of the parent struct. Don't munge it. */
3829 to_rtx
= shallow_copy_rtx (to_rtx
);
3831 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
3834 /* Deal with volatile and readonly fields. The former is only done
3835 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3836 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
3838 if (to_rtx
== orig_to_rtx
)
3839 to_rtx
= copy_rtx (to_rtx
);
3840 MEM_VOLATILE_P (to_rtx
) = 1;
3843 if (TREE_CODE (to
) == COMPONENT_REF
3844 && TREE_READONLY (TREE_OPERAND (to
, 1))
3845 /* We can't assert that a MEM won't be set more than once
3846 if the component is not addressable because another
3847 non-addressable component may be referenced by the same MEM. */
3848 && ! (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
)))
3850 if (to_rtx
== orig_to_rtx
)
3851 to_rtx
= copy_rtx (to_rtx
);
3852 RTX_UNCHANGING_P (to_rtx
) = 1;
3855 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
3857 if (to_rtx
== orig_to_rtx
)
3858 to_rtx
= copy_rtx (to_rtx
);
3859 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3862 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3864 /* Spurious cast for HPUX compiler. */
3865 ? ((enum machine_mode
)
3866 TYPE_MODE (TREE_TYPE (to
)))
3868 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
3870 preserve_temp_slots (result
);
3874 /* If the value is meaningful, convert RESULT to the proper mode.
3875 Otherwise, return nothing. */
3876 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3877 TYPE_MODE (TREE_TYPE (from
)),
3879 TYPE_UNSIGNED (TREE_TYPE (to
)))
3883 /* If the rhs is a function call and its value is not an aggregate,
3884 call the function before we start to compute the lhs.
3885 This is needed for correct code for cases such as
3886 val = setjmp (buf) on machines where reference to val
3887 requires loading up part of an address in a separate insn.
3889 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3890 since it might be a promoted variable where the zero- or sign- extension
3891 needs to be done. Handling this in the normal way is safe because no
3892 computation is done before the call. */
3893 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
3894 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3895 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3896 && REG_P (DECL_RTL (to
))))
3901 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3903 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3905 /* Handle calls that return values in multiple non-contiguous locations.
3906 The Irix 6 ABI has examples of this. */
3907 if (GET_CODE (to_rtx
) == PARALLEL
)
3908 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
3909 int_size_in_bytes (TREE_TYPE (from
)));
3910 else if (GET_MODE (to_rtx
) == BLKmode
)
3911 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
3914 if (POINTER_TYPE_P (TREE_TYPE (to
)))
3915 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3916 emit_move_insn (to_rtx
, value
);
3918 preserve_temp_slots (to_rtx
);
3921 return want_value
? to_rtx
: NULL_RTX
;
3924 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3925 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3928 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3930 /* Don't move directly into a return register. */
3931 if (TREE_CODE (to
) == RESULT_DECL
3932 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
3937 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3939 if (GET_CODE (to_rtx
) == PARALLEL
)
3940 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
3941 int_size_in_bytes (TREE_TYPE (from
)));
3943 emit_move_insn (to_rtx
, temp
);
3945 preserve_temp_slots (to_rtx
);
3948 return want_value
? to_rtx
: NULL_RTX
;
3951 /* In case we are returning the contents of an object which overlaps
3952 the place the value is being stored, use a safe function when copying
3953 a value through a pointer into a structure value return block. */
3954 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3955 && current_function_returns_struct
3956 && !current_function_returns_pcc_struct
)
3961 size
= expr_size (from
);
3962 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3964 if (TARGET_MEM_FUNCTIONS
)
3965 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3966 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3967 XEXP (from_rtx
, 0), Pmode
,
3968 convert_to_mode (TYPE_MODE (sizetype
),
3969 size
, TYPE_UNSIGNED (sizetype
)),
3970 TYPE_MODE (sizetype
));
3972 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3973 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3974 XEXP (to_rtx
, 0), Pmode
,
3975 convert_to_mode (TYPE_MODE (integer_type_node
),
3977 TYPE_UNSIGNED (integer_type_node
)),
3978 TYPE_MODE (integer_type_node
));
3980 preserve_temp_slots (to_rtx
);
3983 return want_value
? to_rtx
: NULL_RTX
;
3986 /* Compute FROM and store the value in the rtx we got. */
3989 result
= store_expr (from
, to_rtx
, want_value
);
3990 preserve_temp_slots (result
);
3993 return want_value
? result
: NULL_RTX
;
3996 /* Generate code for computing expression EXP,
3997 and storing the value into TARGET.
3998 TARGET may contain a QUEUED rtx.
4000 If WANT_VALUE & 1 is nonzero, return a copy of the value
4001 not in TARGET, so that we can be sure to use the proper
4002 value in a containing expression even if TARGET has something
4003 else stored in it. If possible, we copy the value through a pseudo
4004 and return that pseudo. Or, if the value is constant, we try to
4005 return the constant. In some cases, we return a pseudo
4006 copied *from* TARGET.
4008 If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be fixed?
4015 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4016 to catch quickly any cases where the caller uses the value
4017 and fails to set WANT_VALUE.
4019 If WANT_VALUE & 2 is set, this is a store into a call param on the
4020 stack, and block moves may need to be treated specially. */
4023 store_expr (tree exp
, rtx target
, int want_value
)
4026 rtx alt_rtl
= NULL_RTX
;
4027 rtx mark
= mark_queue ();
4028 int dont_return_target
= 0;
4029 int dont_store_target
= 0;
4031 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4033 /* C++ can generate ?: expressions with a throw expression in one
4034 branch and an rvalue in the other. Here, we resolve attempts to
4035 store the throw expression's nonexistent result. */
4038 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
4041 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4043 /* Perform first part of compound expression, then assign from second
4045 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4046 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4048 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4050 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4052 /* For conditional expression, get safe form of the target. Then
4053 test the condition, doing the appropriate assignment on either
4054 side. This avoids the creation of unnecessary temporaries.
4055 For non-BLKmode, it is more efficient not to do this. */
4057 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4060 target
= protect_from_queue (target
, 1);
4062 do_pending_stack_adjust ();
4064 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4065 start_cleanup_deferral ();
4066 store_expr (TREE_OPERAND (exp
, 1), target
, want_value
& 2);
4067 end_cleanup_deferral ();
4069 emit_jump_insn (gen_jump (lab2
));
4072 start_cleanup_deferral ();
4073 store_expr (TREE_OPERAND (exp
, 2), target
, want_value
& 2);
4074 end_cleanup_deferral ();
4079 return want_value
& 1 ? target
: NULL_RTX
;
4081 else if (queued_subexp_p (target
))
4082 /* If target contains a postincrement, let's not risk
4083 using it as the place to generate the rhs. */
4085 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4087 /* Expand EXP into a new pseudo. */
4088 temp
= gen_reg_rtx (GET_MODE (target
));
4089 temp
= expand_expr (exp
, temp
, GET_MODE (target
),
4091 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4094 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
),
4096 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4098 /* If target is volatile, ANSI requires accessing the value
4099 *from* the target, if it is accessed. So make that happen.
4100 In no case return the target itself. */
4101 if (! MEM_VOLATILE_P (target
) && (want_value
& 1) != 0)
4102 dont_return_target
= 1;
4104 else if ((want_value
& 1) != 0
4105 && GET_CODE (target
) == MEM
4106 && ! MEM_VOLATILE_P (target
)
4107 && GET_MODE (target
) != BLKmode
)
4108 /* If target is in memory and caller wants value in a register instead,
4109 arrange that. Pass TARGET as target for expand_expr so that,
4110 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4111 We know expand_expr will not use the target in that case.
4112 Don't do this if TARGET is volatile because we are supposed
4113 to write it and then read it. */
4115 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4116 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4117 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4119 /* If TEMP is already in the desired TARGET, only copy it from
4120 memory and don't store it there again. */
4122 || (rtx_equal_p (temp
, target
)
4123 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4124 dont_store_target
= 1;
4125 temp
= copy_to_reg (temp
);
4127 dont_return_target
= 1;
4129 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4130 /* If this is a scalar in a register that is stored in a wider mode
4131 than the declared mode, compute the result into its declared mode
4132 and then convert to the wider mode. Our value is the computed
4135 rtx inner_target
= 0;
4137 /* If we don't want a value, we can do the conversion inside EXP,
4138 which will often result in some optimizations. Do the conversion
4139 in two steps: first change the signedness, if needed, then
4140 the extend. But don't do this if the type of EXP is a subtype
4141 of something else since then the conversion might involve
4142 more than just converting modes. */
4143 if ((want_value
& 1) == 0
4144 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4145 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4147 if (TYPE_UNSIGNED (TREE_TYPE (exp
))
4148 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4150 (lang_hooks
.types
.signed_or_unsigned_type
4151 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4153 exp
= convert (lang_hooks
.types
.type_for_mode
4154 (GET_MODE (SUBREG_REG (target
)),
4155 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4158 inner_target
= SUBREG_REG (target
);
4161 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
4162 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4164 /* If TEMP is a MEM and we want a result value, make the access
4165 now so it gets done only once. Strictly speaking, this is
4166 only necessary if the MEM is volatile, or if the address
4167 overlaps TARGET. But not performing the load twice also
4168 reduces the amount of rtl we generate and then have to CSE. */
4169 if (GET_CODE (temp
) == MEM
&& (want_value
& 1) != 0)
4170 temp
= copy_to_reg (temp
);
4172 /* If TEMP is a VOIDmode constant, use convert_modes to make
4173 sure that we properly convert it. */
4174 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4176 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4177 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4178 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4179 GET_MODE (target
), temp
,
4180 SUBREG_PROMOTED_UNSIGNED_P (target
));
4183 convert_move (SUBREG_REG (target
), temp
,
4184 SUBREG_PROMOTED_UNSIGNED_P (target
));
4186 /* If we promoted a constant, change the mode back down to match
4187 target. Otherwise, the caller might get confused by a result whose
4188 mode is larger than expected. */
4190 if ((want_value
& 1) != 0 && GET_MODE (temp
) != GET_MODE (target
))
4192 if (GET_MODE (temp
) != VOIDmode
)
4194 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4195 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4196 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4197 SUBREG_PROMOTED_UNSIGNED_P (target
));
4200 temp
= convert_modes (GET_MODE (target
),
4201 GET_MODE (SUBREG_REG (target
)),
4202 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4205 return want_value
& 1 ? temp
: NULL_RTX
;
4209 temp
= expand_expr_real (exp
, target
, GET_MODE (target
),
4211 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
4213 /* Return TARGET if it's a specified hardware register.
4214 If TARGET is a volatile mem ref, either return TARGET
4215 or return a reg copied *from* TARGET; ANSI requires this.
4217 Otherwise, if TEMP is not TARGET, return TEMP
4218 if it is constant (for efficiency),
4219 or if we really want the correct value. */
4220 if (!(target
&& REG_P (target
)
4221 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4222 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4223 && ! rtx_equal_p (temp
, target
)
4224 && (CONSTANT_P (temp
) || (want_value
& 1) != 0))
4225 dont_return_target
= 1;
4228 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4229 the same as that of TARGET, adjust the constant. This is needed, for
4230 example, in case it is a CONST_DOUBLE and we want only a word-sized
4232 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4233 && TREE_CODE (exp
) != ERROR_MARK
4234 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4235 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4236 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not a valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
	 unless necessary, because some front-ends (C++) expr_size-hook
	 aborts on objects that are not supposed to be bit-copied or
	 bit-initialized.  */
      && expr_size (exp) != const0_rtx)
4271 emit_insns_enqueued_after_mark (mark
);
4272 target
= protect_from_queue (target
, 1);
4273 temp
= protect_from_queue (temp
, 0);
4274 if (GET_MODE (temp
) != GET_MODE (target
)
4275 && GET_MODE (temp
) != VOIDmode
)
4277 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
4278 if (dont_return_target
)
4280 /* In this case, we will return TEMP,
4281 so make sure it has the proper mode.
4282 But don't forget to store the value into TARGET. */
4283 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4284 emit_move_insn (target
, temp
);
4287 convert_move (target
, temp
, unsignedp
);
4290 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4292 /* Handle copying a string constant into an array. The string
4293 constant may be shorter than the array. So copy just the string's
4294 actual length, and clear the rest. First get the size of the data
4295 type of the string, which is actually the size of the target. */
4296 rtx size
= expr_size (exp
);
4298 if (GET_CODE (size
) == CONST_INT
4299 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4300 emit_block_move (target
, temp
, size
,
4302 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4305 /* Compute the size of the data to copy from the string. */
4307 = size_binop (MIN_EXPR
,
4308 make_tree (sizetype
, size
),
4309 size_int (TREE_STRING_LENGTH (exp
)));
4311 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4313 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4316 /* Copy that much. */
4317 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
,
4318 TYPE_UNSIGNED (sizetype
));
4319 emit_block_move (target
, temp
, copy_size_rtx
,
4321 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4323 /* Figure out how much is left in TARGET that we have to clear.
4324 Do all calculations in ptr_mode. */
4325 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4327 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4328 target
= adjust_address (target
, BLKmode
,
4329 INTVAL (copy_size_rtx
));
4333 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4334 copy_size_rtx
, NULL_RTX
, 0,
4337 #ifdef POINTERS_EXTEND_UNSIGNED
4338 if (GET_MODE (copy_size_rtx
) != Pmode
)
4339 copy_size_rtx
= convert_to_mode (Pmode
, copy_size_rtx
,
4340 TYPE_UNSIGNED (sizetype
));
4343 target
= offset_address (target
, copy_size_rtx
,
4344 highest_pow2_factor (copy_size
));
4345 label
= gen_label_rtx ();
4346 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4347 GET_MODE (size
), 0, label
);
4350 if (size
!= const0_rtx
)
4351 clear_storage (target
, size
);
4357 /* Handle calls that return values in multiple non-contiguous locations.
4358 The Irix 6 ABI has examples of this. */
4359 else if (GET_CODE (target
) == PARALLEL
)
4360 emit_group_load (target
, temp
, TREE_TYPE (exp
),
4361 int_size_in_bytes (TREE_TYPE (exp
)));
4362 else if (GET_MODE (temp
) == BLKmode
)
4363 emit_block_move (target
, temp
, expr_size (exp
),
4365 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4368 temp
= force_operand (temp
, target
);
4370 emit_move_insn (target
, temp
);
4374 /* If we don't want a value, return NULL_RTX. */
4375 if ((want_value
& 1) == 0)
4378 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4379 ??? The latter test doesn't seem to make sense. */
4380 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4383 /* Return TARGET itself if it is a hard register. */
4384 else if ((want_value
& 1) != 0
4385 && GET_MODE (target
) != BLKmode
4386 && ! (REG_P (target
)
4387 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4388 return copy_to_reg (target
);
4394 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4395 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4396 are set to non-constant values and place it in *P_NC_ELTS. */
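/* For illustration (a hypothetical initializer, not taken from a test
   case): for the constructor of

     struct { int a, b, c; } x = { 1, 0, n };

   where N is a variable, the element 1 counts toward *P_NZ_ELTS, the
   element 0 counts toward neither, and N counts toward both *P_NZ_ELTS
   (it is not known to be zero) and *P_NC_ELTS (it is not constant),
   giving *P_NZ_ELTS == 2 and *P_NC_ELTS == 1.  */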
static void
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_nc_elts)
{
  HOST_WIDE_INT nz_elts, nc_elts;
  tree list;
4408 for (list
= CONSTRUCTOR_ELTS (ctor
); list
; list
= TREE_CHAIN (list
))
4410 tree value
= TREE_VALUE (list
);
4411 tree purpose
= TREE_PURPOSE (list
);
4415 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4417 tree lo_index
= TREE_OPERAND (purpose
, 0);
4418 tree hi_index
= TREE_OPERAND (purpose
, 1);
4420 if (host_integerp (lo_index
, 1) && host_integerp (hi_index
, 1))
4421 mult
= (tree_low_cst (hi_index
, 1)
4422 - tree_low_cst (lo_index
, 1) + 1);
4425 switch (TREE_CODE (value
))
4429 HOST_WIDE_INT nz
= 0, nc
= 0;
4430 categorize_ctor_elements_1 (value
, &nz
, &nc
);
4431 nz_elts
+= mult
* nz
;
4432 nc_elts
+= mult
* nc
;
4438 if (!initializer_zerop (value
))
4442 if (!initializer_zerop (TREE_REALPART (value
)))
4444 if (!initializer_zerop (TREE_IMAGPART (value
)))
4450 for (v
= TREE_VECTOR_CST_ELTS (value
); v
; v
= TREE_CHAIN (v
))
4451 if (!initializer_zerop (TREE_VALUE (v
)))
4458 if (!initializer_constant_valid_p (value
, TREE_TYPE (value
)))
4464 *p_nz_elts
+= nz_elts
;
4465 *p_nc_elts
+= nc_elts
;
4469 categorize_ctor_elements (tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
4470 HOST_WIDE_INT
*p_nc_elts
)
4474 categorize_ctor_elements_1 (ctor
, p_nz_elts
, p_nc_elts
);
4477 /* Count the number of scalars in TYPE. Return -1 on overflow or
4481 count_type_elements (tree type
)
4483 const HOST_WIDE_INT max
= ~((HOST_WIDE_INT
)1 << (HOST_BITS_PER_WIDE_INT
-1));
4484 switch (TREE_CODE (type
))
4488 tree telts
= array_type_nelts (type
);
4489 if (telts
&& host_integerp (telts
, 1))
4491 HOST_WIDE_INT n
= tree_low_cst (telts
, 1);
4492 HOST_WIDE_INT m
= count_type_elements (TREE_TYPE (type
));
4503 HOST_WIDE_INT n
= 0, t
;
4506 for (f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
4507 if (TREE_CODE (f
) == FIELD_DECL
)
4509 t
= count_type_elements (TREE_TYPE (f
));
4519 case QUAL_UNION_TYPE
:
4521 /* Ho hum. How in the world do we guess here? Clearly it isn't
4522 right to count the fields. Guess based on the number of words. */
4523 HOST_WIDE_INT n
= int_size_in_bytes (type
);
4526 return n
/ UNITS_PER_WORD
;
      /* ??? This is broken.  We should encode the vector width in the tree.  */
      return GET_MODE_NUNITS (TYPE_MODE (type));
4543 case REFERENCE_TYPE
:
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, elts;

      /* If there are no ranges of true bits, it is all zero.  */
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts);
      elts = count_type_elements (TREE_TYPE (exp));

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}
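/* A quick worked example (hypothetical, for illustration only): for

     int v[8] = { 5, 0, 0, 0, 0, 0, 0, 0 };

   categorize_ctor_elements finds one nonzero element and
   count_type_elements returns 8, so 1 < 8 / 4 and mostly_zeros_p
   returns 1; with two or more nonzero elements it would return 0.  */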
4580 /* Helper function for store_constructor.
4581 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4582 TYPE is the type of the CONSTRUCTOR, not the element type.
4583 CLEARED is as for store_constructor.
4584 ALIAS_SET is the alias set to use for any stores.
4586 This provides a recursive shortcut back to store_constructor when it isn't
4587 necessary to go through store_field. This is so that we can pass through
4588 the cleared field to let store_constructor know that we may not have to
4589 clear a substructure if the outer structure has already been cleared. */
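/* For instance (illustrative only): an aggregate member that starts on a
   byte boundary and whose BITSIZE is a whole number of bytes can be handed
   straight back to store_constructor, while a packed bit-field member
   (say BITPOS == 3) falls through to store_field below.  */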
static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared, int alias_set)
{
4596 if (TREE_CODE (exp
) == CONSTRUCTOR
4597 /* We can only call store_constructor recursively if the size and
4598 bit position are on a byte boundary. */
4599 && bitpos
% BITS_PER_UNIT
== 0
4600 && (bitsize
> 0 && bitsize
% BITS_PER_UNIT
== 0)
4601 /* If we have a nonzero bitpos for a register target, then we just
4602 let store_field do the bitfield handling. This is unlikely to
4603 generate unnecessary clear instructions anyways. */
4604 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4606 if (GET_CODE (target
) == MEM
)
4608 = adjust_address (target
,
4609 GET_MODE (target
) == BLKmode
4611 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4612 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4615 /* Update the alias set, if required. */
4616 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4617 && MEM_ALIAS_SET (target
) != 0)
4619 target
= copy_rtx (target
);
4620 set_mem_alias_set (target
, alias_set
);
4623 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
4626 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
4630 /* Store the value of constructor EXP into the rtx TARGET.
4631 TARGET is either a REG or a MEM; we know it cannot conflict, since
4632 safe_from_p has been called.
4633 CLEARED is true if TARGET is known to have been zero'd.
4634 SIZE is the number of bytes of TARGET we are allowed to modify: this
4635 may not be the same as the size of EXP if we are assigning to a field
4636 which has been packed to exclude padding bits. */
static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
4646 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4647 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4651 /* If size is zero or the target is already cleared, do nothing. */
4652 if (size
== 0 || cleared
)
4654 /* We either clear the aggregate or indicate the value is dead. */
4655 else if ((TREE_CODE (type
) == UNION_TYPE
4656 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4657 && ! CONSTRUCTOR_ELTS (exp
))
4658 /* If the constructor is empty, clear the union. */
4660 clear_storage (target
, expr_size (exp
));
4664 /* If we are building a static constructor into a register,
4665 set the initial value as zero so we can fold the value into
4666 a constant. But if more than one register is involved,
4667 this probably loses. */
4668 else if (REG_P (target
) && TREE_STATIC (exp
)
4669 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4671 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4675 /* If the constructor has fewer fields than the structure
4676 or if we are initializing the structure to mostly zeros,
4677 clear the whole structure first. Don't do this if TARGET is a
4678 register whose mode size isn't equal to SIZE since clear_storage
4679 can't handle this case. */
4681 && ((list_length (CONSTRUCTOR_ELTS (exp
)) != fields_length (type
))
4682 || mostly_zeros_p (exp
))
4684 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4687 rtx xtarget
= target
;
4689 if (readonly_fields_p (type
))
4691 xtarget
= copy_rtx (xtarget
);
4692 RTX_UNCHANGING_P (xtarget
) = 1;
4695 clear_storage (xtarget
, GEN_INT (size
));
4700 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4702 /* Store each element of the constructor into
4703 the corresponding field of TARGET. */
4705 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4707 tree field
= TREE_PURPOSE (elt
);
4708 tree value
= TREE_VALUE (elt
);
4709 enum machine_mode mode
;
4710 HOST_WIDE_INT bitsize
;
4711 HOST_WIDE_INT bitpos
= 0;
4713 rtx to_rtx
= target
;
4715 /* Just ignore missing fields.
4716 We cleared the whole structure, above,
4717 if any fields are missing. */
4721 if (cleared
&& initializer_zerop (value
))
4724 if (host_integerp (DECL_SIZE (field
), 1))
4725 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4729 mode
= DECL_MODE (field
);
4730 if (DECL_BIT_FIELD (field
))
4733 offset
= DECL_FIELD_OFFSET (field
);
4734 if (host_integerp (offset
, 0)
4735 && host_integerp (bit_position (field
), 0))
4737 bitpos
= int_bit_position (field
);
4741 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4748 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
4749 make_tree (TREE_TYPE (exp
),
4752 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4753 if (GET_CODE (to_rtx
) != MEM
)
4756 #ifdef POINTERS_EXTEND_UNSIGNED
4757 if (GET_MODE (offset_rtx
) != Pmode
)
4758 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
4760 if (GET_MODE (offset_rtx
) != ptr_mode
)
4761 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4764 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4765 highest_pow2_factor (offset
));
4768 if (TREE_READONLY (field
))
4770 if (GET_CODE (to_rtx
) == MEM
)
4771 to_rtx
= copy_rtx (to_rtx
);
4773 RTX_UNCHANGING_P (to_rtx
) = 1;
4776 #ifdef WORD_REGISTER_OPERATIONS
4777 /* If this initializes a field that is smaller than a word, at the
4778 start of a word, try to widen it to a full word.
4779 This special case allows us to output C++ member function
4780 initializations in a form that the optimizers can understand. */
4782 && bitsize
< BITS_PER_WORD
4783 && bitpos
% BITS_PER_WORD
== 0
4784 && GET_MODE_CLASS (mode
) == MODE_INT
4785 && TREE_CODE (value
) == INTEGER_CST
4787 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4789 tree type
= TREE_TYPE (value
);
4791 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4793 type
= lang_hooks
.types
.type_for_size
4794 (BITS_PER_WORD
, TYPE_UNSIGNED (type
));
4795 value
= convert (type
, value
);
4798 if (BYTES_BIG_ENDIAN
)
4800 = fold (build (LSHIFT_EXPR
, type
, value
,
4801 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4802 bitsize
= BITS_PER_WORD
;
4807 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4808 && DECL_NONADDRESSABLE_P (field
))
4810 to_rtx
= copy_rtx (to_rtx
);
4811 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4814 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4815 value
, type
, cleared
,
4816 get_alias_set (TREE_TYPE (field
)));
4819 else if (TREE_CODE (type
) == ARRAY_TYPE
4820 || TREE_CODE (type
) == VECTOR_TYPE
)
4826 tree elttype
= TREE_TYPE (type
);
4828 HOST_WIDE_INT minelt
= 0;
4829 HOST_WIDE_INT maxelt
= 0;
4833 unsigned n_elts
= 0;
4835 if (TREE_CODE (type
) == ARRAY_TYPE
)
4836 domain
= TYPE_DOMAIN (type
);
4838 /* Vectors do not have domains; look up the domain of
4839 the array embedded in the debug representation type.
4840 FIXME Would probably be more efficient to treat vectors
4841 separately from arrays. */
4843 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
4844 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4845 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
4847 enum machine_mode mode
= GET_MODE (target
);
4849 icode
= (int) vec_init_optab
->handlers
[mode
].insn_code
;
4850 if (icode
!= CODE_FOR_nothing
)
4854 elt_size
= GET_MODE_SIZE (GET_MODE_INNER (mode
));
4855 n_elts
= (GET_MODE_SIZE (mode
) / elt_size
);
4856 vector
= alloca (n_elts
);
4857 for (i
= 0; i
< n_elts
; i
++)
4858 vector
[i
] = CONST0_RTX (GET_MODE_INNER (mode
));
4863 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4864 && TYPE_MAX_VALUE (domain
)
4865 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4866 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4868 /* If we have constant bounds for the range of the type, get them. */
4871 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4872 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4875 /* If the constructor has fewer elements than the array,
4876 clear the whole array first. Similarly if this is
4877 static constructor of a non-BLKmode object. */
4878 if (cleared
|| (REG_P (target
) && TREE_STATIC (exp
)))
4882 HOST_WIDE_INT count
= 0, zero_count
= 0;
4883 need_to_clear
= ! const_bounds_p
;
4885 /* This loop is a more accurate version of the loop in
4886 mostly_zeros_p (it handles RANGE_EXPR in an index).
4887 It is also needed to check for missing elements. */
4888 for (elt
= CONSTRUCTOR_ELTS (exp
);
4889 elt
!= NULL_TREE
&& ! need_to_clear
;
4890 elt
= TREE_CHAIN (elt
))
4892 tree index
= TREE_PURPOSE (elt
);
4893 HOST_WIDE_INT this_node_count
;
4895 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4897 tree lo_index
= TREE_OPERAND (index
, 0);
4898 tree hi_index
= TREE_OPERAND (index
, 1);
4900 if (! host_integerp (lo_index
, 1)
4901 || ! host_integerp (hi_index
, 1))
4907 this_node_count
= (tree_low_cst (hi_index
, 1)
4908 - tree_low_cst (lo_index
, 1) + 1);
4911 this_node_count
= 1;
4913 count
+= this_node_count
;
4914 if (mostly_zeros_p (TREE_VALUE (elt
)))
4915 zero_count
+= this_node_count
;
4918 /* Clear the entire array first if there are any missing elements,
4919 or if the incidence of zero elements is >= 75%. */
4921 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4925 if (need_to_clear
&& size
> 0 && !vector
)
4930 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4932 clear_storage (target
, GEN_INT (size
));
4936 else if (REG_P (target
))
4937 /* Inform later passes that the old value is dead. */
4938 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4940 /* Store each element of the constructor into
4941 the corresponding element of TARGET, determined
4942 by counting the elements. */
4943 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4945 elt
= TREE_CHAIN (elt
), i
++)
4947 enum machine_mode mode
;
4948 HOST_WIDE_INT bitsize
;
4949 HOST_WIDE_INT bitpos
;
4951 tree value
= TREE_VALUE (elt
);
4952 tree index
= TREE_PURPOSE (elt
);
4953 rtx xtarget
= target
;
4955 if (cleared
&& initializer_zerop (value
))
4958 unsignedp
= TYPE_UNSIGNED (elttype
);
4959 mode
= TYPE_MODE (elttype
);
4960 if (mode
== BLKmode
)
4961 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4962 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4965 bitsize
= GET_MODE_BITSIZE (mode
);
4967 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4969 tree lo_index
= TREE_OPERAND (index
, 0);
4970 tree hi_index
= TREE_OPERAND (index
, 1);
4971 rtx index_r
, pos_rtx
;
4972 HOST_WIDE_INT lo
, hi
, count
;
4978 /* If the range is constant and "small", unroll the loop. */
4980 && host_integerp (lo_index
, 0)
4981 && host_integerp (hi_index
, 0)
4982 && (lo
= tree_low_cst (lo_index
, 0),
4983 hi
= tree_low_cst (hi_index
, 0),
4984 count
= hi
- lo
+ 1,
4985 (GET_CODE (target
) != MEM
4987 || (host_integerp (TYPE_SIZE (elttype
), 1)
4988 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4991 lo
-= minelt
; hi
-= minelt
;
4992 for (; lo
<= hi
; lo
++)
4994 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4996 if (GET_CODE (target
) == MEM
4997 && !MEM_KEEP_ALIAS_SET_P (target
)
4998 && TREE_CODE (type
) == ARRAY_TYPE
4999 && TYPE_NONALIASED_COMPONENT (type
))
5001 target
= copy_rtx (target
);
5002 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5005 store_constructor_field
5006 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
5007 get_alias_set (elttype
));
5012 rtx loop_start
= gen_label_rtx ();
5013 rtx loop_end
= gen_label_rtx ();
5016 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
5017 unsignedp
= TYPE_UNSIGNED (domain
);
5019 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
5022 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
5024 SET_DECL_RTL (index
, index_r
);
5025 if (TREE_CODE (value
) == SAVE_EXPR
5026 && SAVE_EXPR_RTL (value
) == 0)
5028 /* Make sure value gets expanded once before the
5030 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
5033 store_expr (lo_index
, index_r
, 0);
5035 /* Build the head of the loop. */
5036 do_pending_stack_adjust ();
5038 emit_label (loop_start
);
5040 /* Assign value to element index. */
5042 = convert (ssizetype
,
5043 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5044 index
, TYPE_MIN_VALUE (domain
))));
5045 position
= size_binop (MULT_EXPR
, position
,
5047 TYPE_SIZE_UNIT (elttype
)));
5049 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
5050 xtarget
= offset_address (target
, pos_rtx
,
5051 highest_pow2_factor (position
));
5052 xtarget
= adjust_address (xtarget
, mode
, 0);
5053 if (TREE_CODE (value
) == CONSTRUCTOR
)
5054 store_constructor (value
, xtarget
, cleared
,
5055 bitsize
/ BITS_PER_UNIT
);
5057 store_expr (value
, xtarget
, 0);
5059 /* Generate a conditional jump to exit the loop. */
5060 exit_cond
= build (LT_EXPR
, integer_type_node
,
5062 jumpif (exit_cond
, loop_end
);
5064 /* Update the loop counter, and jump to the head of
5066 expand_increment (build (PREINCREMENT_EXPR
,
5068 index
, integer_one_node
), 0, 0);
5069 emit_jump (loop_start
);
5071 /* Build the end of the loop. */
5072 emit_label (loop_end
);
5075 else if ((index
!= 0 && ! host_integerp (index
, 0))
5076 || ! host_integerp (TYPE_SIZE (elttype
), 1))
5084 index
= ssize_int (1);
5087 index
= convert (ssizetype
,
5088 fold (build (MINUS_EXPR
, index
,
5089 TYPE_MIN_VALUE (domain
))));
5091 position
= size_binop (MULT_EXPR
, index
,
5093 TYPE_SIZE_UNIT (elttype
)));
5094 xtarget
= offset_address (target
,
5095 expand_expr (position
, 0, VOIDmode
, 0),
5096 highest_pow2_factor (position
));
5097 xtarget
= adjust_address (xtarget
, mode
, 0);
5098 store_expr (value
, xtarget
, 0);
5105 pos
= tree_low_cst (index
, 0) - minelt
;
5108 vector
[pos
] = expand_expr (value
, NULL_RTX
, VOIDmode
, 0);
5113 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
5114 * tree_low_cst (TYPE_SIZE (elttype
), 1));
5116 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
5118 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
5119 && TREE_CODE (type
) == ARRAY_TYPE
5120 && TYPE_NONALIASED_COMPONENT (type
))
5122 target
= copy_rtx (target
);
5123 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5125 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
5126 type
, cleared
, get_alias_set (elttype
));
5131 emit_insn (GEN_FCN (icode
) (target
,
5132 gen_rtx_PARALLEL (GET_MODE (target
),
5133 gen_rtvec_v (n_elts
, vector
))));
5137 /* Set constructor assignments. */
5138 else if (TREE_CODE (type
) == SET_TYPE
)
5140 tree elt
= CONSTRUCTOR_ELTS (exp
);
5141 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
5142 tree domain
= TYPE_DOMAIN (type
);
5143 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and then
	 set the bits we want.  */
5155 /* Check for all zeros. */
5156 if (elt
== NULL_TREE
&& size
> 0)
5159 clear_storage (target
, GEN_INT (size
));
5163 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
5164 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
5165 bitlength
= size_binop (PLUS_EXPR
,
5166 size_diffop (domain_max
, domain_min
),
5169 nbits
= tree_low_cst (bitlength
, 1);
5171 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5172 are "complicated" (more than one range), initialize (the
5173 constant parts) by copying from a constant. */
5174 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5175 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5177 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5178 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5179 char *bit_buffer
= alloca (nbits
);
5180 HOST_WIDE_INT word
= 0;
5181 unsigned int bit_pos
= 0;
5182 unsigned int ibit
= 0;
5183 unsigned int offset
= 0; /* In bytes from beginning of set. */
5185 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5188 if (bit_buffer
[ibit
])
5190 if (BYTES_BIG_ENDIAN
)
5191 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5193 word
|= 1 << bit_pos
;
5197 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5199 if (word
!= 0 || ! cleared
)
5201 rtx datum
= gen_int_mode (word
, mode
);
5204 /* The assumption here is that it is safe to use
5205 XEXP if the set is multi-word, but not if
5206 it's single-word. */
5207 if (GET_CODE (target
) == MEM
)
5208 to_rtx
= adjust_address (target
, mode
, offset
);
5209 else if (offset
== 0)
5213 emit_move_insn (to_rtx
, datum
);
5220 offset
+= set_word_size
/ BITS_PER_UNIT
;
5225 /* Don't bother clearing storage if the set is all ones. */
5226 if (TREE_CHAIN (elt
) != NULL_TREE
5227 || (TREE_PURPOSE (elt
) == NULL_TREE
5229 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5230 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5231 || (tree_low_cst (TREE_VALUE (elt
), 0)
5232 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5233 != (HOST_WIDE_INT
) nbits
))))
5234 clear_storage (target
, expr_size (exp
));
5236 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5238 /* Start of range of element or NULL. */
5239 tree startbit
= TREE_PURPOSE (elt
);
5240 /* End of range of element, or element value. */
5241 tree endbit
= TREE_VALUE (elt
);
5242 HOST_WIDE_INT startb
, endb
;
5243 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5245 bitlength_rtx
= expand_expr (bitlength
,
5246 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5248 /* Handle non-range tuple element like [ expr ]. */
5249 if (startbit
== NULL_TREE
)
5251 startbit
= save_expr (endbit
);
5255 startbit
= convert (sizetype
, startbit
);
5256 endbit
= convert (sizetype
, endbit
);
5257 if (! integer_zerop (domain_min
))
5259 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5260 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5262 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5263 EXPAND_CONST_ADDRESS
);
5264 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5265 EXPAND_CONST_ADDRESS
);
5271 ((build_qualified_type (lang_hooks
.types
.type_for_mode
5272 (GET_MODE (target
), 0),
5275 emit_move_insn (targetx
, target
);
5278 else if (GET_CODE (target
) == MEM
)
5283 /* Optimization: If startbit and endbit are constants divisible
5284 by BITS_PER_UNIT, call memset instead. */
5285 if (TARGET_MEM_FUNCTIONS
5286 && TREE_CODE (startbit
) == INTEGER_CST
5287 && TREE_CODE (endbit
) == INTEGER_CST
5288 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5289 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5291 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5293 plus_constant (XEXP (targetx
, 0),
5294 startb
/ BITS_PER_UNIT
),
5296 constm1_rtx
, TYPE_MODE (integer_type_node
),
5297 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5298 TYPE_MODE (sizetype
));
5301 emit_library_call (setbits_libfunc
, LCT_NORMAL
,
5302 VOIDmode
, 4, XEXP (targetx
, 0),
5303 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5304 startbit_rtx
, TYPE_MODE (sizetype
),
5305 endbit_rtx
, TYPE_MODE (sizetype
));
5308 emit_move_insn (target
, targetx
);
5316 /* Store the value of EXP (an expression tree)
5317 into a subfield of TARGET which has mode MODE and occupies
5318 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5319 If MODE is VOIDmode, it means that we are storing into a bit-field.
5321 If VALUE_MODE is VOIDmode, return nothing in particular.
5322 UNSIGNEDP is not used in this case.
5324 Otherwise, return an rtx for the value stored. This rtx
5325 has mode VALUE_MODE if that is convenient to do.
5326 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5328 TYPE is the type of the underlying object,
5330 ALIAS_SET is the alias set for the destination. This value will
5331 (in general) be different from that for TARGET, since TARGET is a
5332 reference to the containing structure. */
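/* By way of example (hypothetical values): storing into a 3-bit bit-field
   that begins 37 bits into its enclosing record would arrive here with
   MODE == VOIDmode, BITSIZE == 3 and BITPOS == 37, whereas storing a plain
   "int" member aligned 8 bytes into the record would typically use
   MODE == SImode (on a target where int is 32 bits wide), BITSIZE == 32
   and BITPOS == 64.  */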
static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     enum machine_mode mode, tree exp, enum machine_mode value_mode,
	     int unsignedp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;
5341 if (TREE_CODE (exp
) == ERROR_MARK
)
5344 /* If we have nothing to store, do nothing unless the expression has
5347 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5348 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5349 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5351 /* If we are storing into an unaligned field of an aligned union that is
5352 in a register, we may have the mode of TARGET being an integer mode but
5353 MODE == BLKmode. In that case, get an aligned object whose size and
5354 alignment are the same as TARGET and store TARGET into it (we can avoid
5355 the store if the field being stored is the entire width of TARGET). Then
5356 call ourselves recursively to store the field into a BLKmode version of
5357 that object. Finally, load from the object into TARGET. This is not
5358 very efficient in general, but should only be slightly more expensive
5359 than the otherwise-required unaligned accesses. Perhaps this can be
5360 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5361 twice, once with emit_move_insn and once via store_field. */
5364 && (REG_P (target
) || GET_CODE (target
) == SUBREG
))
5366 rtx object
= assign_temp (type
, 0, 1, 1);
5367 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5369 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5370 emit_move_insn (object
, target
);
5372 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5375 emit_move_insn (target
, object
);
5377 /* We want to return the BLKmode version of the data. */
5381 if (GET_CODE (target
) == CONCAT
)
5383 /* We're storing into a struct containing a single __complex. */
5387 return store_expr (exp
, target
, value_mode
!= VOIDmode
);
5390 /* If the structure is in a register or if the component
5391 is a bit field, we cannot use addressing to access it.
5392 Use bit-field techniques or SUBREG to store in it. */
5394 if (mode
== VOIDmode
5395 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5396 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5397 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5399 || GET_CODE (target
) == SUBREG
5400 /* If the field isn't aligned enough to store as an ordinary memref,
5401 store it as a bit field. */
5403 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5404 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5405 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5406 || (bitpos
% BITS_PER_UNIT
!= 0)))
5407 /* If the RHS and field are a constant size and the size of the
5408 RHS isn't the same size as the bitfield, we must use bitfield
5411 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5412 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5414 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
5420 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5421 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5422 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5423 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5424 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5428 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5430 if (mode
!= VOIDmode
&& mode
!= BLKmode
5431 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5432 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5434 /* If the modes of TARGET and TEMP are both BLKmode, both
5435 must be in memory and BITPOS must be aligned on a byte
5436 boundary. If so, we simply do a block copy. */
5437 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5439 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5440 || bitpos
% BITS_PER_UNIT
!= 0)
5443 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5444 emit_block_move (target
, temp
,
5445 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5449 return value_mode
== VOIDmode
? const0_rtx
: target
;
5452 /* Store the value in the bitfield. */
5453 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5454 int_size_in_bytes (type
));
5456 if (value_mode
!= VOIDmode
)
5458 /* The caller wants an rtx for the value.
5459 If possible, avoid refetching from the bitfield itself. */
5461 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5464 enum machine_mode tmode
;
5466 tmode
= GET_MODE (temp
);
5467 if (tmode
== VOIDmode
)
5471 return expand_and (tmode
, temp
,
5472 gen_int_mode (width_mask
, tmode
),
5475 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5476 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5477 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5480 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5481 NULL_RTX
, value_mode
, VOIDmode
,
5482 int_size_in_bytes (type
));
5488 rtx addr
= XEXP (target
, 0);
5489 rtx to_rtx
= target
;
5491 /* If a value is wanted, it must be the lhs;
5492 so make the address stable for multiple use. */
5494 if (value_mode
!= VOIDmode
&& !REG_P (addr
)
5495 && ! CONSTANT_ADDRESS_P (addr
)
5496 /* A frame-pointer reference is already stable. */
5497 && ! (GET_CODE (addr
) == PLUS
5498 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5499 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5500 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5501 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5503 /* Now build a reference to just the desired component. */
5505 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5507 if (to_rtx
== target
)
5508 to_rtx
= copy_rtx (to_rtx
);
5510 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5511 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5512 set_mem_alias_set (to_rtx
, alias_set
);
5514 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5518 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5519 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5520 codes and find the ultimate containing object, which we return.
5522 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5523 bit position, and *PUNSIGNEDP to the signedness of the field.
5524 If the position of the field is variable, we store a tree
5525 giving the variable offset (in units) in *POFFSET.
5526 This offset is in addition to the bit position.
5527 If the position is not variable, we store 0 in *POFFSET.
5529 If any of the extraction expressions is volatile,
5530 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5532 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5533 is a mode that can be used to access the field. In that case, *PBITSIZE
5536 If the field describes a variable-sized object, *PMODE is set to
5537 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5538 this case, but the address of the object can be found. */
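/* A small illustration (hypothetical field layout): for a COMPONENT_REF
   such as "obj.f" where F is a 32-bit member placed 4 bytes into OBJ,
   this returns OBJ and sets *PBITSIZE = 32, *PBITPOS = 32, *POFFSET = 0
   and *PMODE to the mode of F; for "a[i].f" with a variable index I, the
   variable part of the displacement comes back through *POFFSET instead.  */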
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
5552 /* First get the mode, signedness, and size. We do this from just the
5553 outermost expression. */
5554 if (TREE_CODE (exp
) == COMPONENT_REF
)
5556 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5557 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5558 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5560 *punsignedp
= DECL_UNSIGNED (TREE_OPERAND (exp
, 1));
5562 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5564 size_tree
= TREE_OPERAND (exp
, 1);
5565 *punsignedp
= BIT_FIELD_REF_UNSIGNED (exp
);
5569 mode
= TYPE_MODE (TREE_TYPE (exp
));
5570 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
5572 if (mode
== BLKmode
)
5573 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5575 *pbitsize
= GET_MODE_BITSIZE (mode
);
5580 if (! host_integerp (size_tree
, 1))
5581 mode
= BLKmode
, *pbitsize
= -1;
5583 *pbitsize
= tree_low_cst (size_tree
, 1);
5586 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5587 and find the ultimate containing object. */
5590 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5591 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5592 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5594 tree field
= TREE_OPERAND (exp
, 1);
5595 tree this_offset
= DECL_FIELD_OFFSET (field
);
5597 /* If this field hasn't been filled in yet, don't go
5598 past it. This should only happen when folding expressions
5599 made during type construction. */
5600 if (this_offset
== 0)
5603 this_offset
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset
, exp
);
5605 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5606 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5607 DECL_FIELD_BIT_OFFSET (field
));
5609 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5612 else if (TREE_CODE (exp
) == ARRAY_REF
5613 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5615 tree index
= TREE_OPERAND (exp
, 1);
5616 tree array
= TREE_OPERAND (exp
, 0);
5617 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5618 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5619 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5621 /* We assume all arrays have sizes that are a multiple of a byte.
5622 First subtract the lower bound, if any, in the type of the
5623 index, then convert to sizetype and multiply by the size of the
5625 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5626 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5629 /* If the index has a self-referential type, instantiate it with
5630 the object; likewise for the component size. */
5631 index
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (index
, exp
);
5632 unit_size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size
, array
);
5633 offset
= size_binop (PLUS_EXPR
, offset
,
5634 size_binop (MULT_EXPR
,
5635 convert (sizetype
, index
),
5639 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5640 conversions that don't change the mode, and all view conversions
5641 except those that need to "step up" the alignment. */
5642 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5643 && ! (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
5644 && ! ((TYPE_ALIGN (TREE_TYPE (exp
))
5645 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5647 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5648 < BIGGEST_ALIGNMENT
)
5649 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
5650 || TYPE_ALIGN_OK (TREE_TYPE
5651 (TREE_OPERAND (exp
, 0))))))
5652 && ! ((TREE_CODE (exp
) == NOP_EXPR
5653 || TREE_CODE (exp
) == CONVERT_EXPR
)
5654 && (TYPE_MODE (TREE_TYPE (exp
))
5655 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5658 /* If any reference in the chain is volatile, the effect is volatile. */
5659 if (TREE_THIS_VOLATILE (exp
))
5662 exp
= TREE_OPERAND (exp
, 0);
5665 /* If OFFSET is constant, see if we can return the whole thing as a
5666 constant bit position. Otherwise, split it up. */
5667 if (host_integerp (offset
, 0)
5668 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5670 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5671 && host_integerp (tem
, 0))
5672 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5674 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5680 /* Return 1 if T is an expression that get_inner_reference handles. */
5683 handled_component_p (tree t
)
5685 switch (TREE_CODE (t
))
5690 case ARRAY_RANGE_REF
:
5691 case NON_LVALUE_EXPR
:
5692 case VIEW_CONVERT_EXPR
:
5695 /* ??? Sure they are handled, but get_inner_reference may return
5696 a different PBITSIZE, depending upon whether the expression is
5697 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5700 return (TYPE_MODE (TREE_TYPE (t
))
5701 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
5708 /* Given an rtx VALUE that may contain additions and multiplications, return
5709 an equivalent value that just refers to a register, memory, or constant.
5710 This is done by generating instructions to perform the arithmetic and
5711 returning a pseudo-register containing the value.
5713 The returned value may be a REG, SUBREG, MEM or constant. */
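/* For instance (illustrative only): given VALUE == (plus:SI (reg:SI 100)
   (const_int 8)), force_operand emits an add into a pseudo (or into
   TARGET when that is convenient) and returns that register, so the
   caller ends up with a simple operand instead of an arithmetic RTX.  */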
rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);
5723 /* Check for subreg applied to an expression produced by loop optimizer. */
5725 && !REG_P (SUBREG_REG (value
))
5726 && GET_CODE (SUBREG_REG (value
)) != MEM
)
5728 value
= simplify_gen_subreg (GET_MODE (value
),
5729 force_reg (GET_MODE (SUBREG_REG (value
)),
5730 force_operand (SUBREG_REG (value
),
5732 GET_MODE (SUBREG_REG (value
)),
5733 SUBREG_BYTE (value
));
5734 code
= GET_CODE (value
);
5737 /* Check for a PIC address load. */
5738 if ((code
== PLUS
|| code
== MINUS
)
5739 && XEXP (value
, 0) == pic_offset_table_rtx
5740 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5741 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5742 || GET_CODE (XEXP (value
, 1)) == CONST
))
5745 subtarget
= gen_reg_rtx (GET_MODE (value
));
5746 emit_move_insn (subtarget
, value
);
5750 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5753 target
= gen_reg_rtx (GET_MODE (value
));
5754 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5755 code
== ZERO_EXTEND
);
5759 if (ARITHMETIC_P (value
))
5761 op2
= XEXP (value
, 1);
5762 if (!CONSTANT_P (op2
) && !(REG_P (op2
) && op2
!= subtarget
))
5764 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5767 op2
= negate_rtx (GET_MODE (value
), op2
);
5770 /* Check for an addition with OP2 a constant integer and our first
5771 operand a PLUS of a virtual register and something else. In that
5772 case, we want to emit the sum of the virtual register and the
5773 constant first and then add the other value. This allows virtual
5774 register instantiation to simply modify the constant rather than
5775 creating another one around this addition. */
5776 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5777 && GET_CODE (XEXP (value
, 0)) == PLUS
5778 && REG_P (XEXP (XEXP (value
, 0), 0))
5779 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5780 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5782 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5783 XEXP (XEXP (value
, 0), 0), op2
,
5784 subtarget
, 0, OPTAB_LIB_WIDEN
);
5785 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5786 force_operand (XEXP (XEXP (value
,
5788 target
, 0, OPTAB_LIB_WIDEN
);
5791 op1
= force_operand (XEXP (value
, 0), subtarget
);
5792 op2
= force_operand (op2
, NULL_RTX
);
5796 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5798 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5799 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5800 target
, 1, OPTAB_LIB_WIDEN
);
5802 return expand_divmod (0,
5803 FLOAT_MODE_P (GET_MODE (value
))
5804 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5805 GET_MODE (value
), op1
, op2
, target
, 0);
5808 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5812 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5816 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5820 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5821 target
, 0, OPTAB_LIB_WIDEN
);
5824 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5825 target
, 1, OPTAB_LIB_WIDEN
);
5828 if (UNARY_P (value
))
5830 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5831 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5834 #ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references
     to be explicit, so we need to deal with such paradoxical SUBREGs.  */
5837 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5838 && (GET_MODE_SIZE (GET_MODE (value
))
5839 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5841 = simplify_gen_subreg (GET_MODE (value
),
5842 force_reg (GET_MODE (SUBREG_REG (value
)),
5843 force_operand (SUBREG_REG (value
),
5845 GET_MODE (SUBREG_REG (value
)),
5846 SUBREG_BYTE (value
));
5852 /* Subroutine of expand_expr: return nonzero iff there is no way that
5853 EXP can reference X, which is being modified. TOP_P is nonzero if this
5854 call is going to be used to determine whether we need a temporary
5855 for EXP, as opposed to a recursive call to this function.
5857 It is always safe for this routine to return zero since it merely
5858 searches for optimization opportunities. */
static int
safe_from_p (rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;
5868 /* If EXP has varying size, we MUST use a target since we currently
5869 have no way of allocating temporaries of variable size
5870 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5871 So we assume here that something at a higher level has prevented a
5872 clash. This is somewhat bogus, but the best we can do. Only
5873 do this when X is BLKmode and when we are at the top level. */
5874 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5875 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5876 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5877 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5878 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5880 && GET_MODE (x
) == BLKmode
)
5881 /* If X is in the outgoing argument area, it is always safe. */
5882 || (GET_CODE (x
) == MEM
5883 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5884 || (GET_CODE (XEXP (x
, 0)) == PLUS
5885 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5888 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5889 find the underlying pseudo. */
5890 if (GET_CODE (x
) == SUBREG
)
5893 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5897 /* A SAVE_EXPR might appear many times in the expression passed to the
5898 top-level safe_from_p call, and if it has a complex subexpression,
5899 examining it multiple times could result in a combinatorial explosion.
5900 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5901 with optimization took about 28 minutes to compile -- even though it was
5902 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5903 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5904 we have processed. Note that the only test of top_p was above. */
5913 rtn
= safe_from_p (x
, exp
, 0);
5915 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5916 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5921 /* Now look at our tree code and possibly recurse. */
5922 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5925 exp_rtl
= DECL_RTL_IF_SET (exp
);
5932 if (TREE_CODE (exp
) == TREE_LIST
)
5936 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5938 exp
= TREE_CHAIN (exp
);
5941 if (TREE_CODE (exp
) != TREE_LIST
)
5942 return safe_from_p (x
, exp
, 0);
5945 else if (TREE_CODE (exp
) == ERROR_MARK
)
5946 return 1; /* An already-visited SAVE_EXPR? */
5952 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
5957 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5961 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5962 the expression. If it is set, we conflict iff we are that rtx or
5963 both are in memory. Otherwise, we check all operands of the
5964 expression recursively. */
5966 switch (TREE_CODE (exp
))
5969 /* If the operand is static or we are static, we can't conflict.
5970 Likewise if we don't conflict with the operand at all. */
5971 if (staticp (TREE_OPERAND (exp
, 0))
5972 || TREE_STATIC (exp
)
5973 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
      /* Otherwise, the only way this can conflict is if we are taking
	 the address of a DECL and that address is part of X, which is
	 very rare.  */
5979 exp
= TREE_OPERAND (exp
, 0);
5982 if (!DECL_RTL_SET_P (exp
)
5983 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5986 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5991 if (GET_CODE (x
) == MEM
5992 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5993 get_alias_set (exp
)))
5998 /* Assume that the call will clobber all hard registers and
6000 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6001 || GET_CODE (x
) == MEM
)
6006 /* If a sequence exists, we would have to scan every instruction
6007 in the sequence to see if it was safe. This is probably not
6009 if (RTL_EXPR_SEQUENCE (exp
))
6012 exp_rtl
= RTL_EXPR_RTL (exp
);
	case WITH_CLEANUP_EXPR:
	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;
	  /* If we've already scanned this, don't do it again.  Otherwise,
	     show we've scanned it and record for clearing the flag if we're
	     going to.  */
	  if (TREE_PRIVATE (exp))
	    return 1;

	  TREE_PRIVATE (exp) = 1;
	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    {
	      TREE_PRIVATE (exp) = 0;
	      return 0;
	    }

	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;
      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !lang_hooks.safe_from_p (x, exp))
	return 0;
    }
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}
      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;
  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);
    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;
    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;
    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
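/* For illustration (hypothetical index expression, not from any testcase):
   for  i * 12 + 8  the MULT_EXPR case returns 1 * 4 == 4 (the variable
   contributes 1, the lowest set bit of 12 is 4), the constant 8 contributes
   8, and the PLUS_EXPR case returns MIN (4, 8) == 4, i.e. the whole
   expression is known to be a multiple of 4.  */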
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
  else
    target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
  return MAX (factor, target_align);
}
/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;
  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);
6211 if (TREE_STATIC (var
)
6212 ? !TREE_ASM_WRITTEN (var
)
6213 : !DECL_RTL_SET_P (var
))
6215 if (TREE_CODE (var
) == VAR_DECL
&& DECL_DEFER_OUTPUT (var
))
6217 /* Prepare a mem & address for the decl. */
6220 if (TREE_STATIC (var
))
6223 x
= gen_rtx_MEM (DECL_MODE (var
),
6224 gen_reg_rtx (Pmode
));
6226 set_mem_attributes (x
, var
, 1);
6227 SET_DECL_RTL (var
, x
);
6229 else if (lang_hooks
.expand_decl (var
))
6231 else if (TREE_CODE (var
) == VAR_DECL
&& !TREE_STATIC (var
))
6233 else if (TREE_CODE (var
) == VAR_DECL
&& TREE_STATIC (var
))
6234 rest_of_decl_compilation (var
, NULL
, 0, 0);
6235 else if (TREE_CODE (var
) == TYPE_DECL
6236 || TREE_CODE (var
) == CONST_DECL
6237 || TREE_CODE (var
) == FUNCTION_DECL
6238 || TREE_CODE (var
) == LABEL_DECL
)
6239 /* No expansion needed. */;
/* Expands declarations of variables in list VARS.  */

void
expand_vars (tree vars)
{
  for (; vars; vars = TREE_CHAIN (vars))
    {
      tree var = vars;

      if (DECL_EXTERNAL (var))
	continue;

      expand_var (var);
      expand_decl_init (var);
    }
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */
static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;

  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);

      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
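/* A minimal usage sketch (hypothetical caller code, using the names that
   appear in expand_expr_real_1 below): a typical binary-operator case does

     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		      subtarget, &op0, &op1, 0);
     return expand_binop (mode, this_optab, op0, op1, target,
			  unsignedp, OPTAB_LIB_WIDEN);

   relying on the safe_from_p check above to drop TARGET when expanding
   operand 1 could clobber it.  */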
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on.  */
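/* For illustration (hypothetical source; the register numbers are invented):
   under EXPAND_SUM the address of  p->a[i]  with 4-byte elements may come
   back in the unreduced form

     (plus:SI (reg/f:SI 58) (mult:SI (reg:SI 59) (const_int 4)))

   which the caller can hand to memory_address, whereas EXPAND_NORMAL would
   already have forced the sum into a single pseudo register.  */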
static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;
  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }
  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }
  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);
      /* Record where the insns produced belong.  */
      if (cfun->dont_emit_block_notes)
	record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    {
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
    }
  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						REG_NOTES (insn));
	}
    }

  return ret;
}
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}
      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */
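  /* For example (hypothetical source): with cse expected, both occurrences
     of a + b in  t = (a + b) * c;  u = (a + b) * d;  are computed into fresh
     pseudos rather than directly into a hard register or MEM target, so cse
     can recognize the repeated sum and reuse it.  */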
6502 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6503 && (!REG_P (target
) || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
6504 && ! (code
== CONSTRUCTOR
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6505 && ! (code
== CALL_EXPR
&& aggregate_value_p (exp
, exp
)))
6512 tree function
= decl_function_context (exp
);
6514 temp
= label_rtx (exp
);
6515 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
6517 if (function
!= current_function_decl
6519 LABEL_REF_NONLOCAL_P (temp
) = 1;
6521 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
6526 if (!DECL_RTL_SET_P (exp
))
6528 error ("%Jprior parameter's size depends on '%D'", exp
, exp
);
6529 return CONST0_RTX (mode
);
6532 /* ... fall through ... */
6535 /* If a static var's type was incomplete when the decl was written,
6536 but the type is complete now, lay out the decl now. */
6537 if (DECL_SIZE (exp
) == 0
6538 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
6539 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6540 layout_decl (exp
, 0);
6542 /* ... fall through ... */
6546 if (DECL_RTL (exp
) == 0)
	  /* Ensure variable marked as used even if it doesn't go through
	     a parser.  If it hasn't been used yet, write out an external
	     definition.  */
6552 if (! TREE_USED (exp
))
6554 assemble_external (exp
);
6555 TREE_USED (exp
) = 1;
6558 /* Show we haven't gotten RTL for this yet. */
6561 /* Handle variables inherited from containing functions. */
6562 context
= decl_function_context (exp
);
6564 if (context
!= 0 && context
!= current_function_decl
6565 /* If var is static, we don't need a static chain to access it. */
6566 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6567 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6571 /* Mark as non-local and addressable. */
6572 DECL_NONLOCAL (exp
) = 1;
6573 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6575 lang_hooks
.mark_addressable (exp
);
6576 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6578 addr
= XEXP (DECL_RTL (exp
), 0);
6579 if (GET_CODE (addr
) == MEM
)
6581 = replace_equiv_address (addr
,
6582 fix_lexical_addr (XEXP (addr
, 0), exp
));
6584 addr
= fix_lexical_addr (addr
, exp
);
6586 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6589 /* This is the case of an array whose size is to be determined
6590 from its initializer, while the initializer is still being parsed.
6593 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6594 && REG_P (XEXP (DECL_RTL (exp
), 0)))
6595 temp
= validize_mem (DECL_RTL (exp
));
6597 /* If DECL_RTL is memory, we are in the normal case and either
6598 the address is not valid or it is not a register and -fforce-addr
6599 is specified, get the address into a register. */
6601 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6602 && modifier
!= EXPAND_CONST_ADDRESS
6603 && modifier
!= EXPAND_SUM
6604 && modifier
!= EXPAND_INITIALIZER
6605 && (! memory_address_p (DECL_MODE (exp
),
6606 XEXP (DECL_RTL (exp
), 0))
6608 && !REG_P (XEXP (DECL_RTL (exp
), 0)))))
6611 *alt_rtl
= DECL_RTL (exp
);
6612 temp
= replace_equiv_address (DECL_RTL (exp
),
6613 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6616 /* If we got something, return it. But first, set the alignment
6617 if the address is a register. */
6620 if (GET_CODE (temp
) == MEM
&& REG_P (XEXP (temp
, 0)))
6621 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6626 /* If the mode of DECL_RTL does not match that of the decl, it
6627 must be a promoted value. We return a SUBREG of the wanted mode,
6628 but mark it so that we know that it was already extended. */
6630 if (REG_P (DECL_RTL (exp
))
6631 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6633 /* Get the signedness used for this variable. Ensure we get the
6634 same mode we got when the variable was declared. */
6635 if (GET_MODE (DECL_RTL (exp
))
6636 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6637 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6640 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6641 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6642 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6646 return DECL_RTL (exp
);
6649 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6650 TREE_INT_CST_HIGH (exp
), mode
);
      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
6657 if (TREE_CONSTANT_OVERFLOW (exp
)
6658 && modifier
!= EXPAND_INITIALIZER
)
6659 temp
= force_reg (mode
, temp
);
6664 return const_vector_from_tree (exp
);
6667 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
6680 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6681 TYPE_MODE (TREE_TYPE (exp
)));
6684 /* Handle evaluating a complex constant in a CONCAT target. */
6685 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
6687 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
6690 rtarg
= XEXP (original_target
, 0);
6691 itarg
= XEXP (original_target
, 1);
6693 /* Move the real and imaginary parts separately. */
6694 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, 0);
6695 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, 0);
6698 emit_move_insn (rtarg
, op0
);
6700 emit_move_insn (itarg
, op1
);
6702 return original_target
;
6705 /* ... fall through ... */
6708 temp
= output_constant_def (exp
, 1);
6710 /* temp contains a constant address.
6711 On RISC machines where a constant address isn't valid,
6712 make some insns to get that address into a register. */
6713 if (modifier
!= EXPAND_CONST_ADDRESS
6714 && modifier
!= EXPAND_INITIALIZER
6715 && modifier
!= EXPAND_SUM
6716 && (! memory_address_p (mode
, XEXP (temp
, 0))
6717 || flag_force_addr
))
6718 return replace_equiv_address (temp
,
6719 copy_rtx (XEXP (temp
, 0)));
6723 context
= decl_function_context (exp
);
6725 /* If this SAVE_EXPR was at global context, assume we are an
6726 initialization function and move it into our context. */
6728 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6730 if (context
== current_function_decl
)
6733 /* If this is non-local, handle it. */
6736 /* The following call just exists to abort if the context is
6737 not of a containing function. */
6738 find_function_data (context
);
6740 temp
= SAVE_EXPR_RTL (exp
);
6741 if (temp
&& REG_P (temp
))
6743 put_var_into_stack (exp
, /*rescan=*/true);
6744 temp
= SAVE_EXPR_RTL (exp
);
6746 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6749 replace_equiv_address (temp
,
6750 fix_lexical_addr (XEXP (temp
, 0), exp
));
6752 if (SAVE_EXPR_RTL (exp
) == 0)
6754 if (mode
== VOIDmode
)
6757 temp
= assign_temp (build_qualified_type (type
,
6759 | TYPE_QUAL_CONST
)),
6762 SAVE_EXPR_RTL (exp
) = temp
;
6763 if (!optimize
&& REG_P (temp
))
6764 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6767 /* If the mode of TEMP does not match that of the expression, it
6768 must be a promoted value. We pass store_expr a SUBREG of the
6769 wanted mode but mark it so that we know that it was already
6772 if (REG_P (temp
) && GET_MODE (temp
) != mode
)
6774 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6775 promote_mode (type
, mode
, &unsignedp
, 0);
6776 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6777 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6780 if (temp
== const0_rtx
)
6781 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6783 store_expr (TREE_OPERAND (exp
, 0), temp
,
6784 modifier
== EXPAND_STACK_PARM
? 2 : 0);
6786 TREE_USED (exp
) = 1;
6789 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6790 must be a promoted value. We return a SUBREG of the wanted mode,
6791 but mark it so that we know that it was already extended. */
6793 if (REG_P (SAVE_EXPR_RTL (exp
))
6794 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6796 /* Compute the signedness and make the proper SUBREG. */
6797 promote_mode (type
, mode
, &unsignedp
, 0);
6798 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6799 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6800 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6804 return SAVE_EXPR_RTL (exp
);
6809 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6810 TREE_OPERAND (exp
, 0)
6811 = lang_hooks
.unsave_expr_now (TREE_OPERAND (exp
, 0));
6816 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6817 expand_goto (TREE_OPERAND (exp
, 0));
6819 expand_computed_goto (TREE_OPERAND (exp
, 0));
6822 /* These are lowered during gimplification, so we should never ever
6828 case LABELED_BLOCK_EXPR
:
6829 if (LABELED_BLOCK_BODY (exp
))
6830 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6831 /* Should perhaps use expand_label, but this is simpler and safer. */
6832 do_pending_stack_adjust ();
6833 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6836 case EXIT_BLOCK_EXPR
:
6837 if (EXIT_BLOCK_RETURN (exp
))
6838 sorry ("returned value in block_exit_expr");
6839 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6844 tree block
= BIND_EXPR_BLOCK (exp
);
6847 if (TREE_CODE (BIND_EXPR_BODY (exp
)) != RTL_EXPR
)
6849 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6850 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6851 mark_ends
= (block
!= NULL_TREE
);
6852 expand_start_bindings_and_block (mark_ends
? 0 : 2, block
);
6856 /* If we're not in functions-as-trees mode, we've already emitted
6857 those notes into our RTL_EXPR, so we just want to splice our BLOCK
6858 into the enclosing one. */
6861 /* Need to open a binding contour here because
6862 if there are any cleanups they must be contained here. */
6863 expand_start_bindings_and_block (2, NULL_TREE
);
6865 /* Mark the corresponding BLOCK for output in its proper place. */
6868 if (TREE_USED (block
))
6870 lang_hooks
.decls
.insert_block (block
);
6874 /* If VARS have not yet been expanded, expand them now. */
6875 expand_vars (BIND_EXPR_VARS (exp
));
	/* TARGET was clobbered early in this function.  The correct
	   indicator of whether or not we need the value of this
	   expression is the IGNORE variable.  */
6880 temp
= expand_expr (BIND_EXPR_BODY (exp
),
6881 ignore
? const0_rtx
: target
,
6884 expand_end_bindings (BIND_EXPR_VARS (exp
), mark_ends
, 0);
6890 if (RTL_EXPR_SEQUENCE (exp
))
6892 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6894 emit_insn (RTL_EXPR_SEQUENCE (exp
));
6895 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6897 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6898 free_temps_for_rtl_expr (exp
);
6900 *alt_rtl
= RTL_EXPR_ALT_RTL (exp
);
6901 return RTL_EXPR_RTL (exp
);
6904 /* If we don't need the result, just ensure we evaluate any
6910 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6911 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
6929 else if ((TREE_STATIC (exp
)
6930 && ((mode
== BLKmode
6931 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6932 || TREE_ADDRESSABLE (exp
)
6933 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6934 && (! MOVE_BY_PIECES_P
6935 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6937 && ! mostly_zeros_p (exp
))))
6938 || ((modifier
== EXPAND_INITIALIZER
6939 || modifier
== EXPAND_CONST_ADDRESS
)
6940 && TREE_CONSTANT (exp
)))
6942 rtx constructor
= output_constant_def (exp
, 1);
6944 if (modifier
!= EXPAND_CONST_ADDRESS
6945 && modifier
!= EXPAND_INITIALIZER
6946 && modifier
!= EXPAND_SUM
)
6947 constructor
= validize_mem (constructor
);
6953 /* Handle calls that pass values in multiple non-contiguous
6954 locations. The Irix 6 ABI has examples of this. */
6955 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6956 || GET_CODE (target
) == PARALLEL
6957 || modifier
== EXPAND_STACK_PARM
)
6959 = assign_temp (build_qualified_type (type
,
6961 | (TREE_READONLY (exp
)
6962 * TYPE_QUAL_CONST
))),
6963 0, TREE_ADDRESSABLE (exp
), 1);
6965 store_constructor (exp
, target
, 0, int_expr_size (exp
));
6971 tree exp1
= TREE_OPERAND (exp
, 0);
6973 if (modifier
!= EXPAND_WRITE
)
6977 t
= fold_read_from_constant_string (exp
);
6979 return expand_expr (t
, target
, tmode
, modifier
);
6982 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6983 op0
= memory_address (mode
, op0
);
6984 temp
= gen_rtx_MEM (mode
, op0
);
6985 set_mem_attributes (temp
, exp
, 0);
6987 /* If we are writing to this object and its type is a record with
6988 readonly fields, we must mark it as readonly so it will
6989 conflict with readonly references to those fields. */
6990 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
6991 RTX_UNCHANGING_P (temp
) = 1;
6998 #ifdef ENABLE_CHECKING
6999 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
7004 tree array
= TREE_OPERAND (exp
, 0);
7005 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
7006 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
7007 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7018 if (! integer_zerop (low_bound
))
7019 index
= size_diffop (index
, convert (sizetype
, low_bound
));
	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */
7026 if (modifier
!= EXPAND_CONST_ADDRESS
7027 && modifier
!= EXPAND_INITIALIZER
7028 && modifier
!= EXPAND_MEMORY
)
7030 tree t
= fold_read_from_constant_string (exp
);
7033 return expand_expr (t
, target
, tmode
, modifier
);
	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */
7041 if (modifier
!= EXPAND_CONST_ADDRESS
7042 && modifier
!= EXPAND_INITIALIZER
7043 && modifier
!= EXPAND_MEMORY
7044 && TREE_CODE (array
) == CONSTRUCTOR
7045 && ! TREE_SIDE_EFFECTS (array
)
7046 && TREE_CODE (index
) == INTEGER_CST
7047 && 0 > compare_tree_int (index
,
7048 list_length (CONSTRUCTOR_ELTS
7049 (TREE_OPERAND (exp
, 0)))))
7053 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
7054 i
= TREE_INT_CST_LOW (index
);
7055 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
7059 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
7063 else if (optimize
>= 1
7064 && modifier
!= EXPAND_CONST_ADDRESS
7065 && modifier
!= EXPAND_INITIALIZER
7066 && modifier
!= EXPAND_MEMORY
7067 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
7068 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
7069 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
7070 && targetm
.binds_local_p (array
))
7072 if (TREE_CODE (index
) == INTEGER_CST
)
7074 tree init
= DECL_INITIAL (array
);
7076 if (TREE_CODE (init
) == CONSTRUCTOR
)
7080 for (elem
= CONSTRUCTOR_ELTS (init
);
7082 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
7083 elem
= TREE_CHAIN (elem
))
7086 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
7087 return expand_expr (fold (TREE_VALUE (elem
)), target
,
7090 else if (TREE_CODE (init
) == STRING_CST
7091 && 0 > compare_tree_int (index
,
7092 TREE_STRING_LENGTH (init
)))
7094 tree type
= TREE_TYPE (TREE_TYPE (init
));
7095 enum machine_mode mode
= TYPE_MODE (type
);
7097 if (GET_MODE_CLASS (mode
) == MODE_INT
7098 && GET_MODE_SIZE (mode
) == 1)
7099 return gen_int_mode (TREE_STRING_POINTER (init
)
7100 [TREE_INT_CST_LOW (index
)], mode
);
7105 goto normal_inner_ref
;
7108 /* If the operand is a CONSTRUCTOR, we can just extract the
7109 appropriate field if it is present. */
7110 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
7114 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
7115 elt
= TREE_CHAIN (elt
))
7116 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
	      /* We can normally use the value of the field in the
		 CONSTRUCTOR.  However, if this is a bitfield in
		 an integral mode that we can fit in a HOST_WIDE_INT,
		 we must mask only the number of bits in the bitfield,
		 since this is done implicitly by the constructor.  If
		 the bitfield does not meet either of those conditions,
		 we can't do this optimization.  */
7124 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7125 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
7127 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
7128 <= HOST_BITS_PER_WIDE_INT
))))
7130 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7131 && modifier
== EXPAND_STACK_PARM
)
7133 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
7134 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
7136 HOST_WIDE_INT bitsize
7137 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
7138 enum machine_mode imode
7139 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
7141 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
7143 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7144 op0
= expand_and (imode
, op0
, op1
, target
);
7149 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
7152 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7154 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7162 goto normal_inner_ref
;
7165 case ARRAY_RANGE_REF
:
7168 enum machine_mode mode1
;
7169 HOST_WIDE_INT bitsize
, bitpos
;
7172 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7173 &mode1
, &unsignedp
, &volatilep
);
7176 /* If we got back the original object, something is wrong. Perhaps
7177 we are evaluating an expression too early. In any event, don't
7178 infinitely recurse. */
7182 /* If TEM's type is a union of variable size, pass TARGET to the inner
7183 computation, since it will need a temporary and TARGET is known
7184 to have to do. This occurs in unchecked conversion in Ada. */
7188 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7189 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7191 && modifier
!= EXPAND_STACK_PARM
7192 ? target
: NULL_RTX
),
7194 (modifier
== EXPAND_INITIALIZER
7195 || modifier
== EXPAND_CONST_ADDRESS
7196 || modifier
== EXPAND_STACK_PARM
)
7197 ? modifier
: EXPAND_NORMAL
);
7199 /* If this is a constant, put it into a register if it is a
7200 legitimate constant and OFFSET is 0 and memory if it isn't. */
7201 if (CONSTANT_P (op0
))
7203 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7204 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7206 op0
= force_reg (mode
, op0
);
7208 op0
= validize_mem (force_const_mem (mode
, op0
));
	/* Otherwise, if this object is not in memory and we either have an
	   offset or a BLKmode result, put it there.  This case can't occur in
	   C, but can in Ada if we have unchecked conversion of an expression
	   from a scalar type to an array or record type or for an
	   ARRAY_RANGE_REF whose type is BLKmode.  */
7216 else if (GET_CODE (op0
) != MEM
7218 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
7220 /* If the operand is a SAVE_EXPR, we can deal with this by
7221 forcing the SAVE_EXPR into memory. */
7222 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7224 put_var_into_stack (TREE_OPERAND (exp
, 0),
7226 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7231 = build_qualified_type (TREE_TYPE (tem
),
7232 (TYPE_QUALS (TREE_TYPE (tem
))
7233 | TYPE_QUAL_CONST
));
7234 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7236 emit_move_insn (memloc
, op0
);
7243 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7246 if (GET_CODE (op0
) != MEM
)
7249 #ifdef POINTERS_EXTEND_UNSIGNED
7250 if (GET_MODE (offset_rtx
) != Pmode
)
7251 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7253 if (GET_MODE (offset_rtx
) != ptr_mode
)
7254 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7257 if (GET_MODE (op0
) == BLKmode
7258 /* A constant address in OP0 can have VOIDmode, we must
7259 not try to call force_reg in that case. */
7260 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7262 && (bitpos
% bitsize
) == 0
7263 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7264 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7266 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7270 op0
= offset_address (op0
, offset_rtx
,
7271 highest_pow2_factor (offset
));
7274 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7275 record its alignment as BIGGEST_ALIGNMENT. */
7276 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7277 && is_aligning_offset (offset
, tem
))
7278 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7280 /* Don't forget about volatility even if this is a bitfield. */
7281 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7283 if (op0
== orig_op0
)
7284 op0
= copy_rtx (op0
);
7286 MEM_VOLATILE_P (op0
) = 1;
7289 /* The following code doesn't handle CONCAT.
7290 Assume only bitpos == 0 can be used for CONCAT, due to
7291 one element arrays having the same mode as its element. */
7292 if (GET_CODE (op0
) == CONCAT
)
7294 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
      /* In cases where an aligned union has an unaligned object
	 as a field, we might be extracting a BLKmode value from
	 an integer-mode (e.g., SImode) object.  Handle this case
	 by doing the extract into an object as wide as the field
	 (which we know to be the width of a basic mode), then
	 storing into memory, and changing the mode to BLKmode.  */
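      /* Illustrative (hypothetical) case:  for
	     union u { int i; struct { char c[3]; } s; } x;
	 reading x.s while x lives in an SImode register means extracting a
	 24-bit BLKmode value from an integer-mode object: we extract it in
	 an integer mode, store it into a stack temporary, and switch the
	 resulting MEM to BLKmode.  */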
7305 if (mode1
== VOIDmode
7306 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
7307 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7308 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7309 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7310 && modifier
!= EXPAND_CONST_ADDRESS
7311 && modifier
!= EXPAND_INITIALIZER
)
7312 /* If the field isn't aligned enough to fetch as a memref,
7313 fetch it as a bit field. */
7314 || (mode1
!= BLKmode
7315 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7316 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
7317 || (GET_CODE (op0
) == MEM
7318 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
7319 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
7320 && ((modifier
== EXPAND_CONST_ADDRESS
7321 || modifier
== EXPAND_INITIALIZER
)
7323 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7324 || (bitpos
% BITS_PER_UNIT
!= 0)))
7325 /* If the type and the field are a constant size and the
7326 size of the type isn't the same size as the bitfield,
7327 we must use bitfield operations. */
7329 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7331 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7334 enum machine_mode ext_mode
= mode
;
7336 if (ext_mode
== BLKmode
7337 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7338 && GET_CODE (target
) == MEM
7339 && bitpos
% BITS_PER_UNIT
== 0))
7340 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7342 if (ext_mode
== BLKmode
)
7345 target
= assign_temp (type
, 0, 1, 1);
7350 /* In this case, BITPOS must start at a byte boundary and
7351 TARGET, if specified, must be a MEM. */
7352 if (GET_CODE (op0
) != MEM
7353 || (target
!= 0 && GET_CODE (target
) != MEM
)
7354 || bitpos
% BITS_PER_UNIT
!= 0)
7357 emit_block_move (target
,
7358 adjust_address (op0
, VOIDmode
,
7359 bitpos
/ BITS_PER_UNIT
),
7360 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7362 (modifier
== EXPAND_STACK_PARM
7363 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7368 op0
= validize_mem (op0
);
7370 if (GET_CODE (op0
) == MEM
&& REG_P (XEXP (op0
, 0)))
7371 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7373 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7374 (modifier
== EXPAND_STACK_PARM
7375 ? NULL_RTX
: target
),
7377 int_size_in_bytes (TREE_TYPE (tem
)));
7379 /* If the result is a record type and BITSIZE is narrower than
7380 the mode of OP0, an integral mode, and this is a big endian
7381 machine, we must put the field into the high-order bits. */
7382 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7383 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7384 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7385 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7386 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
	  /* If the result type is BLKmode, store the data into a temporary
	     of the appropriate type, but with the mode corresponding to the
	     mode for the data we have (op0's mode).  It's tempting to make
	     this a constant type, since we know it's only being stored once,
	     but that can cause problems if we are taking the address of this
	     COMPONENT_REF because the MEM of any reference via that address
	     will have flags corresponding to the type, which will not
	     necessarily be constant.  */
7398 if (mode
== BLKmode
)
7401 = assign_stack_temp_for_type
7402 (ext_mode
, GET_MODE_BITSIZE (ext_mode
), 0, type
);
7404 emit_move_insn (new, op0
);
7405 op0
= copy_rtx (new);
7406 PUT_MODE (op0
, BLKmode
);
7407 set_mem_attributes (op0
, exp
, 1);
7413 /* If the result is BLKmode, use that to access the object
7415 if (mode
== BLKmode
)
7418 /* Get a reference to just this component. */
7419 if (modifier
== EXPAND_CONST_ADDRESS
7420 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7421 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7423 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7425 if (op0
== orig_op0
)
7426 op0
= copy_rtx (op0
);
7428 set_mem_attributes (op0
, exp
, 0);
7429 if (REG_P (XEXP (op0
, 0)))
7430 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7432 MEM_VOLATILE_P (op0
) |= volatilep
;
7433 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7434 || modifier
== EXPAND_CONST_ADDRESS
7435 || modifier
== EXPAND_INITIALIZER
)
7437 else if (target
== 0)
7438 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7440 convert_move (target
, op0
, unsignedp
);
7446 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7448 /* Evaluate the interior expression. */
7449 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7452 /* Get or create an instruction off which to hang a note. */
7453 if (REG_P (subtarget
))
7456 insn
= get_last_insn ();
7459 if (! INSN_P (insn
))
7460 insn
= prev_nonnote_insn (insn
);
7464 target
= gen_reg_rtx (GET_MODE (subtarget
));
7465 insn
= emit_move_insn (target
, subtarget
);
7468 /* Collect the data for the note. */
7469 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7470 vtbl_ref
= plus_constant (vtbl_ref
,
7471 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7472 /* Discard the initial CONST that was added. */
7473 vtbl_ref
= XEXP (vtbl_ref
, 0);
7476 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
	/* Pascal set IN expression.

	     rlo       = set_low - (set_low % bits_per_word);
	     the_word  = set [ (index - rlo) / bits_per_word ];
	     bit_index = index % bits_per_word;
	     bitmask   = 1 << bit_index;
	     return !!(the_word & bitmask);  */
7498 tree set
= TREE_OPERAND (exp
, 0);
7499 tree index
= TREE_OPERAND (exp
, 1);
7500 int iunsignedp
= TYPE_UNSIGNED (TREE_TYPE (index
));
7501 tree set_type
= TREE_TYPE (set
);
7502 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7503 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7504 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7505 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7506 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7507 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7508 rtx setaddr
= XEXP (setval
, 0);
7509 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7511 rtx diff
, quo
, rem
, addr
, bit
, result
;
	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
7515 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7516 && TREE_CODE (set_low_bound
) == INTEGER_CST
7517 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7518 || (TREE_CODE (index
) == INTEGER_CST
7519 && TREE_CODE (set_low_bound
) == INTEGER_CST
7520 && tree_int_cst_lt (index
, set_low_bound
))
7521 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7522 && TREE_CODE (index
) == INTEGER_CST
7523 && tree_int_cst_lt (set_high_bound
, index
))))
7527 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7529 /* If we get here, we have to generate the code for both cases
7530 (in range and out of range). */
7532 op0
= gen_label_rtx ();
7533 op1
= gen_label_rtx ();
7535 if (! (GET_CODE (index_val
) == CONST_INT
7536 && GET_CODE (lo_r
) == CONST_INT
))
7537 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7538 GET_MODE (index_val
), iunsignedp
, op1
);
7540 if (! (GET_CODE (index_val
) == CONST_INT
7541 && GET_CODE (hi_r
) == CONST_INT
))
7542 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7543 GET_MODE (index_val
), iunsignedp
, op1
);
7545 /* Calculate the element number of bit zero in the first word
7547 if (GET_CODE (lo_r
) == CONST_INT
)
7548 rlow
= GEN_INT (INTVAL (lo_r
)
7549 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7551 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7552 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7553 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7555 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7556 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7558 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7559 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7560 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7561 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7563 addr
= memory_address (byte_mode
,
7564 expand_binop (index_mode
, add_optab
, diff
,
7565 setaddr
, NULL_RTX
, iunsignedp
,
7568 /* Extract the bit we want to examine. */
7569 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7570 gen_rtx_MEM (byte_mode
, addr
),
7571 make_tree (TREE_TYPE (index
), rem
),
7573 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7574 GET_MODE (target
) == byte_mode
? target
: 0,
7575 1, OPTAB_LIB_WIDEN
);
7577 if (result
!= target
)
7578 convert_move (target
, result
, 1);
7580 /* Output the code to handle the out-of-range case. */
7583 emit_move_insn (target
, const0_rtx
);
7588 case WITH_CLEANUP_EXPR
:
7589 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7591 WITH_CLEANUP_EXPR_RTL (exp
)
7592 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7593 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7594 CLEANUP_EH_ONLY (exp
));
7596 /* That's it for this cleanup. */
7597 TREE_OPERAND (exp
, 1) = 0;
7599 return WITH_CLEANUP_EXPR_RTL (exp
);
7601 case CLEANUP_POINT_EXPR
:
7603 /* Start a new binding layer that will keep track of all cleanup
7604 actions to be performed. */
7605 expand_start_bindings (2);
7607 target_temp_slot_level
= temp_slot_level
;
7609 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7610 /* If we're going to use this value, load it up now. */
7612 op0
= force_not_mem (op0
);
7613 preserve_temp_slots (op0
);
7614 expand_end_bindings (NULL_TREE
, 0, 0);
7619 /* Check for a built-in function. */
7620 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7621 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7623 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7625 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7626 == BUILT_IN_FRONTEND
)
7627 return lang_hooks
.expand_expr (exp
, original_target
,
7631 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7634 return expand_call (exp
, target
, ignore
);
7636 case NON_LVALUE_EXPR
:
7639 case REFERENCE_EXPR
:
7640 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7643 if (TREE_CODE (type
) == UNION_TYPE
)
7645 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7647 /* If both input and output are BLKmode, this conversion isn't doing
7648 anything except possibly changing memory attribute. */
7649 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7651 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7654 result
= copy_rtx (result
);
7655 set_mem_attributes (result
, exp
, 0);
7661 if (TYPE_MODE (type
) != BLKmode
)
7662 target
= gen_reg_rtx (TYPE_MODE (type
));
7664 target
= assign_temp (type
, 0, 1, 1);
7667 if (GET_CODE (target
) == MEM
)
7668 /* Store data into beginning of memory target. */
7669 store_expr (TREE_OPERAND (exp
, 0),
7670 adjust_address (target
, TYPE_MODE (valtype
), 0),
7671 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7673 else if (REG_P (target
))
7674 /* Store this field into a union of the proper type. */
7675 store_field (target
,
7676 MIN ((int_size_in_bytes (TREE_TYPE
7677 (TREE_OPERAND (exp
, 0)))
7679 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7680 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7681 VOIDmode
, 0, type
, 0);
7685 /* Return the entire union. */
7689 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7691 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7694 /* If the signedness of the conversion differs and OP0 is
7695 a promoted SUBREG, clear that indication since we now
7696 have to do the proper extension. */
7697 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7698 && GET_CODE (op0
) == SUBREG
)
7699 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7704 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7705 if (GET_MODE (op0
) == mode
)
7708 /* If OP0 is a constant, just convert it into the proper mode. */
7709 if (CONSTANT_P (op0
))
7711 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7712 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7714 if (modifier
== EXPAND_INITIALIZER
)
7715 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7716 subreg_lowpart_offset (mode
,
7719 return convert_modes (mode
, inner_mode
, op0
,
7720 TYPE_UNSIGNED (inner_type
));
7723 if (modifier
== EXPAND_INITIALIZER
)
7724 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7728 convert_to_mode (mode
, op0
,
7729 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7731 convert_move (target
, op0
,
7732 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7735 case VIEW_CONVERT_EXPR
:
7736 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7738 /* If the input and output modes are both the same, we are done.
7739 Otherwise, if neither mode is BLKmode and both are integral and within
7740 a word, we can use gen_lowpart. If neither is true, make sure the
7741 operand is in memory and convert the MEM to the new mode. */
7742 if (TYPE_MODE (type
) == GET_MODE (op0
))
7744 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7745 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7746 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7747 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7748 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7749 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7750 else if (GET_CODE (op0
) != MEM
)
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
7756 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7758 if (TREE_ADDRESSABLE (exp
))
7761 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7763 = assign_stack_temp_for_type
7764 (TYPE_MODE (inner_type
),
7765 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7767 emit_move_insn (target
, op0
);
      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
7775 if (GET_CODE (op0
) == MEM
)
7777 op0
= copy_rtx (op0
);
7779 if (TYPE_ALIGN_OK (type
))
7780 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7781 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7782 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7784 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7785 HOST_WIDE_INT temp_size
7786 = MAX (int_size_in_bytes (inner_type
),
7787 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7788 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7789 temp_size
, 0, type
);
7790 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7792 if (TREE_ADDRESSABLE (exp
))
7795 if (GET_MODE (op0
) == BLKmode
)
7796 emit_block_move (new_with_op0_mode
, op0
,
7797 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7798 (modifier
== EXPAND_STACK_PARM
7799 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7801 emit_move_insn (new_with_op0_mode
, op0
);
7806 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7812 this_optab
= ! unsignedp
&& flag_trapv
7813 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7814 ? addv_optab
: add_optab
;
      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */
7827 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7828 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7829 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7830 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7831 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7832 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7834 tree t
= TREE_OPERAND (exp
, 1);
7836 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7837 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
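      /* As a concrete illustration (hypothetical declarations):  for
	     static int arr[10];  int *p = &arr[3];
	 the initializer expression arr + 12 can be returned as
	     (const (plus (symbol_ref "arr") (const_int 12)))
	 via plus_constant instead of emitting an add into a pseudo.  */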
7848 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7849 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7851 if (modifier
== EXPAND_STACK_PARM
)
7853 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7854 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7855 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7859 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
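	      /* Worked example of the truncation issue (made-up constant):
		 on a 64-bit host targeting 32-bit SImode, the tree constant
		 0xffffffff used directly would become (const_int 4294967295);
		 immed_double_const truncates to SImode and sign-extends,
		 giving the canonical (const_int -1).  */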
7866 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7868 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7869 op1
= plus_constant (op1
, INTVAL (constant_part
));
7870 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7871 op1
= force_operand (op1
, target
);
7875 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7876 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7877 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7881 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7882 (modifier
== EXPAND_INITIALIZER
7883 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7884 if (! CONSTANT_P (op0
))
7886 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7887 VOIDmode
, modifier
);
7888 /* Return a PLUS if modifier says it's OK. */
7889 if (modifier
== EXPAND_SUM
7890 || modifier
== EXPAND_INITIALIZER
)
7891 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
7899 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7901 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7902 op0
= plus_constant (op0
, INTVAL (constant_part
));
7903 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7904 op0
= force_operand (op0
, target
);
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
7913 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7914 || mode
!= ptr_mode
)
7916 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7917 subtarget
, &op0
, &op1
, 0);
7918 if (op0
== const0_rtx
)
7920 if (op1
== const0_rtx
)
7925 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7926 subtarget
, &op0
, &op1
, modifier
);
7927 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
7935 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7936 && really_constant_p (TREE_OPERAND (exp
, 0))
7937 && really_constant_p (TREE_OPERAND (exp
, 1)))
7939 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7940 NULL_RTX
, &op0
, &op1
, modifier
);
7942 /* If the last operand is a CONST_INT, use plus_constant of
7943 the negated constant. Else make the MINUS. */
7944 if (GET_CODE (op1
) == CONST_INT
)
7945 return plus_constant (op0
, - INTVAL (op1
));
7947 return gen_rtx_MINUS (mode
, op0
, op1
);
7950 this_optab
= ! unsignedp
&& flag_trapv
7951 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7952 ? subv_optab
: sub_optab
;
7954 /* No sense saving up arithmetic to be done
7955 if it's all in the wrong mode to form part of an address.
7956 And force_operand won't know whether to sign-extend or
7958 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7959 || mode
!= ptr_mode
)
7962 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7963 subtarget
, &op0
, &op1
, modifier
);
7965 /* Convert A - const to A + (-const). */
7966 if (GET_CODE (op1
) == CONST_INT
)
7968 op1
= negate_rtx (mode
, op1
);
7969 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7975 /* If first operand is constant, swap them.
7976 Thus the following special case checks need only
7977 check the second operand. */
7978 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7980 tree t1
= TREE_OPERAND (exp
, 0);
7981 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7982 TREE_OPERAND (exp
, 1) = t1
;
7985 /* Attempt to return something suitable for generating an
7986 indexed address, for machines that support that. */
7988 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7989 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7991 tree exp1
= TREE_OPERAND (exp
, 1);
7993 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7997 op0
= force_operand (op0
, NULL_RTX
);
7999 op0
= copy_to_mode_reg (mode
, op0
);
8001 return gen_rtx_MULT (mode
, op0
,
8002 gen_int_mode (tree_low_cst (exp1
, 0),
8003 TYPE_MODE (TREE_TYPE (exp1
))));
8006 if (modifier
== EXPAND_STACK_PARM
)
8009 /* Check for multiplying things that have been extended
8010 from a narrower type. If this machine supports multiplying
8011 in that narrower type with a result in the desired type,
8012 do it that way, and avoid the explicit type-conversion. */
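      /* Illustrative example, not part of the original source: for
	 "(long long) a * (long long) b" where A and B are 32-bit values,
	 the check below lets us emit a single widening 32x32->64 multiply
	 (smul_widen_optab / umul_widen_optab) instead of extending both
	 operands to 64 bits and doing a full-width multiply.  */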
8013 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
8014 && TREE_CODE (type
) == INTEGER_TYPE
8015 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8016 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
8017 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
8018 && int_fits_type_p (TREE_OPERAND (exp
, 1),
8019 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8020 /* Don't use a widening multiply if a shift will do. */
8021 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
8022 > HOST_BITS_PER_WIDE_INT
)
8023 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
8025 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8026 && (TYPE_PRECISION (TREE_TYPE
8027 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
8028 == TYPE_PRECISION (TREE_TYPE
8030 (TREE_OPERAND (exp
, 0), 0))))
8031 /* If both operands are extended, they must either both
8032 be zero-extended or both be sign-extended. */
8033 && (TYPE_UNSIGNED (TREE_TYPE
8034 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
8035 == TYPE_UNSIGNED (TREE_TYPE
8037 (TREE_OPERAND (exp
, 0), 0)))))))
8039 tree op0type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0));
8040 enum machine_mode innermode
= TYPE_MODE (op0type
);
8041 bool zextend_p
= TYPE_UNSIGNED (op0type
);
8042 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
8043 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
8045 if (mode
== GET_MODE_WIDER_MODE (innermode
))
8047 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
8049 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
8050 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8051 TREE_OPERAND (exp
, 1),
8052 NULL_RTX
, &op0
, &op1
, 0);
8054 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8055 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
8056 NULL_RTX
, &op0
, &op1
, 0);
8059 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
8060 && innermode
== word_mode
)
8063 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8064 NULL_RTX
, VOIDmode
, 0);
8065 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
8066 op1
= convert_modes (innermode
, mode
,
8067 expand_expr (TREE_OPERAND (exp
, 1),
8068 NULL_RTX
, VOIDmode
, 0),
8071 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
8072 NULL_RTX
, VOIDmode
, 0);
8073 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8074 unsignedp
, OPTAB_LIB_WIDEN
);
8075 hipart
= gen_highpart (innermode
, temp
);
8076 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
8080 emit_move_insn (hipart
, htem
);
8085 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8086 subtarget
, &op0
, &op1
, 0);
8087 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
8089 case TRUNC_DIV_EXPR
:
8090 case FLOOR_DIV_EXPR
:
8092 case ROUND_DIV_EXPR
:
8093 case EXACT_DIV_EXPR
:
8094 if (modifier
== EXPAND_STACK_PARM
)
8096 /* Possible optimization: compute the dividend with EXPAND_SUM
8097 then if the divisor is constant can optimize the case
8098 where some terms of the dividend have coeffs divisible by it. */
8099 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8100 subtarget
, &op0
, &op1
, 0);
8101 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
8104 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
8105 expensive divide. If not, combine will rebuild the original
8107 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
8108 && TREE_CODE (type
) == REAL_TYPE
8109 && !real_onep (TREE_OPERAND (exp
, 0)))
8110 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
8111 build (RDIV_EXPR
, type
,
8112 build_real (type
, dconst1
),
8113 TREE_OPERAND (exp
, 1))),
8114 target
, tmode
, modifier
);
8115 this_optab
= sdiv_optab
;
8118 case TRUNC_MOD_EXPR
:
8119 case FLOOR_MOD_EXPR
:
8121 case ROUND_MOD_EXPR
:
8122 if (modifier
== EXPAND_STACK_PARM
)
8124 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8125 subtarget
, &op0
, &op1
, 0);
8126 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
8128 case FIX_ROUND_EXPR
:
8129 case FIX_FLOOR_EXPR
:
8131 abort (); /* Not used for C. */
8133 case FIX_TRUNC_EXPR
:
8134 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8135 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8136 target
= gen_reg_rtx (mode
);
8137 expand_fix (target
, op0
, unsignedp
);
8141 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8142 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8143 target
= gen_reg_rtx (mode
);
8144 /* expand_float can't figure out what to do if FROM has VOIDmode.
8145 So give it the correct mode. With -O, cse will optimize this. */
8146 if (GET_MODE (op0
) == VOIDmode
)
8147 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8149 expand_float (target
, op0
,
8150 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
8154 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8155 if (modifier
== EXPAND_STACK_PARM
)
8157 temp
= expand_unop (mode
,
8158 ! unsignedp
&& flag_trapv
8159 && (GET_MODE_CLASS(mode
) == MODE_INT
)
8160 ? negv_optab
: neg_optab
, op0
, target
, 0);
8166 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8167 if (modifier
== EXPAND_STACK_PARM
)
8170 /* ABS_EXPR is not valid for complex arguments. */
8171 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
8172 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
8175 /* Unsigned abs is simply the operand. Testing here means we don't
8176 risk generating incorrect code below. */
8177 if (TYPE_UNSIGNED (type
))
8180 return expand_abs (mode
, op0
, target
, unsignedp
,
8181 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
8185 target
= original_target
;
8187 || modifier
== EXPAND_STACK_PARM
8188 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8189 || GET_MODE (target
) != mode
8191 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8192 target
= gen_reg_rtx (mode
);
8193 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8194 target
, &op0
, &op1
, 0);
8196 /* First try to do it with a special MIN or MAX instruction.
8197 If that does not win, use a conditional jump to select the proper
8199 this_optab
= (unsignedp
8200 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8201 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8203 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
8211 if (GET_CODE (target
) == MEM
)
8212 target
= gen_reg_rtx (mode
);
8214 /* If op1 was placed in target, swap op0 and op1. */
8215 if (target
!= op0
&& target
== op1
)
8223 emit_move_insn (target
, op0
);
8225 op0
= gen_label_rtx ();
8227 /* If this mode is an integer too wide to compare properly,
8228 compare word by word. Rely on cse to optimize constant cases. */
8229 if (GET_MODE_CLASS (mode
) == MODE_INT
8230 && ! can_compare_p (GE
, mode
, ccp_jump
))
8232 if (code
== MAX_EXPR
)
8233 do_jump_by_parts_greater_rtx (mode
, unsignedp
, target
, op1
,
8236 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op1
, target
,
8241 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8242 unsignedp
, mode
, NULL_RTX
, NULL_RTX
, op0
);
8244 emit_move_insn (target
, op1
);
8249 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8250 if (modifier
== EXPAND_STACK_PARM
)
8252 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8257 /* ??? Can optimize bitwise operations with one arg constant.
8258 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8259 and (a bitwise1 b) bitwise2 b (etc)
8260 but that is probably not worth while. */
8262 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8263 boolean values when we want in all cases to compute both of them. In
8264 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8265 as actual zero-or-1 values and then bitwise anding. In cases where
8266 there cannot be any side effects, better code would be made by
8267 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8268 how to recognize those cases. */
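      /* Hypothetical example: for "c = a && b" where neither operand has
	 side effects, TRUTH_AND_EXPR evaluates both operands to 0/1 values
	 and ANDs them, whereas TRUTH_ANDIF_EXPR would branch around the
	 evaluation of B; as the comment above notes, we do not currently
	 try to choose between the two forms here.  */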
8270 case TRUTH_AND_EXPR
:
8272 this_optab
= and_optab
;
8277 this_optab
= ior_optab
;
8280 case TRUTH_XOR_EXPR
:
8282 this_optab
= xor_optab
;
8289 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8291 if (modifier
== EXPAND_STACK_PARM
)
8293 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8294 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
8297 /* Could determine the answer when only additive constants differ. Also,
8298 the addition of one can be handled by changing the condition. */
8305 case UNORDERED_EXPR
:
8313 temp
= do_store_flag (exp
,
8314 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8315 tmode
!= VOIDmode
? tmode
: mode
, 0);
8319 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8320 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8322 && REG_P (original_target
)
8323 && (GET_MODE (original_target
)
8324 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8326 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8329 /* If temp is constant, we can just compute the result. */
8330 if (GET_CODE (temp
) == CONST_INT
)
8332 if (INTVAL (temp
) != 0)
8333 emit_move_insn (target
, const1_rtx
);
8335 emit_move_insn (target
, const0_rtx
);
8340 if (temp
!= original_target
)
8342 enum machine_mode mode1
= GET_MODE (temp
);
8343 if (mode1
== VOIDmode
)
8344 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
8346 temp
= copy_to_mode_reg (mode1
, temp
);
8349 op1
= gen_label_rtx ();
8350 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8351 GET_MODE (temp
), unsignedp
, op1
);
8352 emit_move_insn (temp
, const1_rtx
);
8357 /* If no set-flag instruction, must generate a conditional
8358 store into a temporary variable. Drop through
8359 and handle this like && and ||. */
8361 case TRUTH_ANDIF_EXPR
:
8362 case TRUTH_ORIF_EXPR
:
8365 || modifier
== EXPAND_STACK_PARM
8366 || ! safe_from_p (target
, exp
, 1)
8367 /* Make sure we don't have a hard reg (such as function's return
8368 value) live across basic blocks, if not optimizing. */
8369 || (!optimize
&& REG_P (target
)
8370 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8371 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8374 emit_clr_insn (target
);
8376 op1
= gen_label_rtx ();
8377 jumpifnot (exp
, op1
);
8380 emit_0_to_1_insn (target
);
8383 return ignore
? const0_rtx
: target
;
8385 case TRUTH_NOT_EXPR
:
8386 if (modifier
== EXPAND_STACK_PARM
)
8388 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8389 /* The parser is careful to generate TRUTH_NOT_EXPR
8390 only with operands that are always zero or one. */
8391 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8392 target
, 1, OPTAB_LIB_WIDEN
);
8398 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8400 return expand_expr_real (TREE_OPERAND (exp
, 1),
8401 (ignore
? const0_rtx
: target
),
8402 VOIDmode
, modifier
, alt_rtl
);
8404 case STATEMENT_LIST
:
8406 tree_stmt_iterator iter
;
8411 for (iter
= tsi_start (exp
); !tsi_end_p (iter
); tsi_next (&iter
))
8412 expand_expr (tsi_stmt (iter
), const0_rtx
, VOIDmode
, modifier
);
8417 /* If it's void, we don't need to worry about computing a value. */
8418 if (VOID_TYPE_P (TREE_TYPE (exp
)))
8420 tree pred
= TREE_OPERAND (exp
, 0);
8421 tree then_
= TREE_OPERAND (exp
, 1);
8422 tree else_
= TREE_OPERAND (exp
, 2);
8424 /* If we do not have any pending cleanups or stack_levels
8425 to restore, and at least one arm of the COND_EXPR is a
8426 GOTO_EXPR to a local label, then we can emit more efficient
8427 code by using jumpif/jumpifnot instead of the 'if' machinery. */
8429 || containing_blocks_have_cleanups_or_stack_level ())
8431 else if (TREE_CODE (then_
) == GOTO_EXPR
8432 && TREE_CODE (GOTO_DESTINATION (then_
)) == LABEL_DECL
)
8434 jumpif (pred
, label_rtx (GOTO_DESTINATION (then_
)));
8435 return expand_expr (else_
, const0_rtx
, VOIDmode
, 0);
8437 else if (TREE_CODE (else_
) == GOTO_EXPR
8438 && TREE_CODE (GOTO_DESTINATION (else_
)) == LABEL_DECL
)
8440 jumpifnot (pred
, label_rtx (GOTO_DESTINATION (else_
)));
8441 return expand_expr (then_
, const0_rtx
, VOIDmode
, 0);
8444 /* Just use the 'if' machinery. */
8445 expand_start_cond (pred
, 0);
8446 start_cleanup_deferral ();
8447 expand_expr (then_
, const0_rtx
, VOIDmode
, 0);
8451 /* Iterate over 'else if's instead of recursing. */
8452 for (; TREE_CODE (exp
) == COND_EXPR
; exp
= TREE_OPERAND (exp
, 2))
8454 expand_start_else ();
8455 if (EXPR_HAS_LOCATION (exp
))
8457 emit_line_note (EXPR_LOCATION (exp
));
8458 if (cfun
->dont_emit_block_notes
)
8459 record_block_change (TREE_BLOCK (exp
));
8461 expand_elseif (TREE_OPERAND (exp
, 0));
8462 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, 0);
8464 /* Don't emit the jump and label if there's no 'else' clause. */
8465 if (TREE_SIDE_EFFECTS (exp
))
8467 expand_start_else ();
8468 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
8470 end_cleanup_deferral ();
8475 /* If we would have a "singleton" (see below) were it not for a
8476 conversion in each arm, bring that conversion back out. */
8477 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8478 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8479 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8480 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8482 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8483 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8485 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8486 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8487 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8488 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8489 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8490 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8491 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8492 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8493 return expand_expr (build1 (NOP_EXPR
, type
,
8494 build (COND_EXPR
, TREE_TYPE (iftrue
),
8495 TREE_OPERAND (exp
, 0),
8497 target
, tmode
, modifier
);
8501 /* Note that COND_EXPRs whose type is a structure or union
8502 are required to be constructed to contain assignments of
8503 a temporary variable, so that we can evaluate them here
8504 for side effect only. If type is void, we must do likewise. */
8506 /* If an arm of the branch requires a cleanup,
8507 only that cleanup is performed. */
8510 tree binary_op
= 0, unary_op
= 0;
8512 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8513 convert it to our mode, if necessary. */
8514 if (integer_onep (TREE_OPERAND (exp
, 1))
8515 && integer_zerop (TREE_OPERAND (exp
, 2))
8516 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8520 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8525 if (modifier
== EXPAND_STACK_PARM
)
8527 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8528 if (GET_MODE (op0
) == mode
)
8532 target
= gen_reg_rtx (mode
);
8533 convert_move (target
, op0
, unsignedp
);
8537 /* Check for X ? A + B : A. If we have this, we can copy A to the
8538 output and conditionally add B. Similarly for unary operations.
8539 Don't do this if X has side-effects because those side effects
8540 might affect A or B and the "?" operation is a sequence point in
8541 ANSI. (operand_equal_p tests for side effects.) */
8543 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8544 && operand_equal_p (TREE_OPERAND (exp
, 2),
8545 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8546 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8547 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8548 && operand_equal_p (TREE_OPERAND (exp
, 1),
8549 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8550 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8551 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8552 && operand_equal_p (TREE_OPERAND (exp
, 2),
8553 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8554 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8555 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8556 && operand_equal_p (TREE_OPERAND (exp
, 1),
8557 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8558 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8560 /* If we are not to produce a result, we have no target. Otherwise,
8561 if a target was specified use it; it will not be used as an
8562 intermediate target unless it is safe. If no target, use a
8567 else if (modifier
== EXPAND_STACK_PARM
)
8568 temp
= assign_temp (type
, 0, 0, 1);
8569 else if (original_target
8570 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8571 || (singleton
&& REG_P (original_target
)
8572 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8573 && original_target
== var_rtx (singleton
)))
8574 && GET_MODE (original_target
) == mode
8575 #ifdef HAVE_conditional_move
8576 && (! can_conditionally_move_p (mode
)
8577 || REG_P (original_target
)
8578 || TREE_ADDRESSABLE (type
))
8580 && (GET_CODE (original_target
) != MEM
8581 || TREE_ADDRESSABLE (type
)))
8582 temp
= original_target
;
8583 else if (TREE_ADDRESSABLE (type
))
8586 temp
= assign_temp (type
, 0, 0, 1);
8588 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8589 do the test of X as a store-flag operation, do this as
8590 A + ((X != 0) << log C). Similarly for other simple binary
8591 operators. Only do for C == 1 if BRANCH_COST is low. */
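	  /* Sketch of the transformation, not quoted from the source: for
	     "t = x ? a + 4 : a" with a usable store-flag insn we emit
	     roughly "t = a + ((x != 0) << 2)" and avoid a branch; when
	     BRANCH_COST is low, only the C == 1 case (where no shift is
	     needed) is attempted.  */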
8592 if (temp
&& singleton
&& binary_op
8593 && (TREE_CODE (binary_op
) == PLUS_EXPR
8594 || TREE_CODE (binary_op
) == MINUS_EXPR
8595 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8596 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8597 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8598 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8599 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8603 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8604 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8605 ? addv_optab
: add_optab
)
8606 : TREE_CODE (binary_op
) == MINUS_EXPR
8607 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8608 ? subv_optab
: sub_optab
)
8609 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8612 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8613 if (singleton
== TREE_OPERAND (exp
, 1))
8614 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8616 cond
= TREE_OPERAND (exp
, 0);
8618 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8620 mode
, BRANCH_COST
<= 1);
8622 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8623 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8624 build_int_2 (tree_log2
8628 (safe_from_p (temp
, singleton
, 1)
8629 ? temp
: NULL_RTX
), 0);
8633 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8634 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8635 unsignedp
, OPTAB_LIB_WIDEN
);
8639 do_pending_stack_adjust ();
8641 op0
= gen_label_rtx ();
8643 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8647 /* If the target conflicts with the other operand of the
8648 binary op, we can't use it. Also, we can't use the target
8649 if it is a hard register, because evaluating the condition
8650 might clobber it. */
8652 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8654 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8655 temp
= gen_reg_rtx (mode
);
8656 store_expr (singleton
, temp
,
8657 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8660 expand_expr (singleton
,
8661 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8662 if (singleton
== TREE_OPERAND (exp
, 1))
8663 jumpif (TREE_OPERAND (exp
, 0), op0
);
8665 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8667 start_cleanup_deferral ();
8668 if (binary_op
&& temp
== 0)
8669 /* Just touch the other operand. */
8670 expand_expr (TREE_OPERAND (binary_op
, 1),
8671 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8673 store_expr (build (TREE_CODE (binary_op
), type
,
8674 make_tree (type
, temp
),
8675 TREE_OPERAND (binary_op
, 1)),
8676 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8678 store_expr (build1 (TREE_CODE (unary_op
), type
,
8679 make_tree (type
, temp
)),
8680 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8683 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8684 comparison operator. If we have one of these cases, set the
8685 output to A, branch on A (cse will merge these two references),
8686 then set the output to FOO. */
8688 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8689 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8690 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8691 TREE_OPERAND (exp
, 1), 0)
8692 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8693 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8694 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8697 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8698 temp
= gen_reg_rtx (mode
);
8699 store_expr (TREE_OPERAND (exp
, 1), temp
,
8700 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8701 jumpif (TREE_OPERAND (exp
, 0), op0
);
8703 start_cleanup_deferral ();
8704 if (TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8705 store_expr (TREE_OPERAND (exp
, 2), temp
,
8706 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8708 expand_expr (TREE_OPERAND (exp
, 2),
8709 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8713 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8714 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8715 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8716 TREE_OPERAND (exp
, 2), 0)
8717 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8718 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8719 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8722 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8723 temp
= gen_reg_rtx (mode
);
8724 store_expr (TREE_OPERAND (exp
, 2), temp
,
8725 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8726 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8728 start_cleanup_deferral ();
8729 if (TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8730 store_expr (TREE_OPERAND (exp
, 1), temp
,
8731 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8733 expand_expr (TREE_OPERAND (exp
, 1),
8734 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8739 op1
= gen_label_rtx ();
8740 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8742 start_cleanup_deferral ();
8744 /* One branch of the cond can be void, if it never returns. For
8745 example A ? throw : E */
8747 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8748 store_expr (TREE_OPERAND (exp
, 1), temp
,
8749 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8751 expand_expr (TREE_OPERAND (exp
, 1),
8752 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8753 end_cleanup_deferral ();
8755 emit_jump_insn (gen_jump (op1
));
8758 start_cleanup_deferral ();
8760 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8761 store_expr (TREE_OPERAND (exp
, 2), temp
,
8762 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8764 expand_expr (TREE_OPERAND (exp
, 2),
8765 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8768 end_cleanup_deferral ();
8779 /* Something needs to be initialized, but we didn't know
8780 where that thing was when building the tree. For example,
8781 it could be the return value of a function, or a parameter
8782 to a function which lays down in the stack, or a temporary
8783 variable which must be passed by reference.
8785 We guarantee that the expression will either be constructed
8786 or copied into our original target. */
8788 tree slot
= TREE_OPERAND (exp
, 0);
8789 tree cleanups
= NULL_TREE
;
8792 if (TREE_CODE (slot
) != VAR_DECL
)
8796 target
= original_target
;
8798 /* Set this here so that if we get a target that refers to a
8799 register variable that's already been used, put_reg_into_stack
8800 knows that it should fix up those uses. */
8801 TREE_USED (slot
) = 1;
8805 if (DECL_RTL_SET_P (slot
))
8807 target
= DECL_RTL (slot
);
8808 /* If we have already expanded the slot, so don't do
8810 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8815 target
= assign_temp (type
, 2, 0, 1);
8816 SET_DECL_RTL (slot
, target
);
8817 if (TREE_ADDRESSABLE (slot
))
8818 put_var_into_stack (slot
, /*rescan=*/false);
8820 /* Since SLOT is not known to the called function
8821 to belong to its stack frame, we must build an explicit
8822 cleanup. This case occurs when we must build up a reference
8823 to pass the reference as an argument. In this case,
8824 it is very likely that such a reference need not be
8827 if (TREE_OPERAND (exp
, 2) == 0)
8828 TREE_OPERAND (exp
, 2)
8829 = lang_hooks
.maybe_build_cleanup (slot
);
8830 cleanups
= TREE_OPERAND (exp
, 2);
8835 /* This case does occur, when expanding a parameter which
8836 needs to be constructed on the stack. The target
8837 is the actual stack address that we want to initialize.
8838 The function we call will perform the cleanup in this case. */
8840 /* If we have already assigned it space, use that space,
8841 not target that we were passed in, as our target
8842 parameter is only a hint. */
8843 if (DECL_RTL_SET_P (slot
))
8845 target
= DECL_RTL (slot
);
8846 /* If we have already expanded the slot, so don't do
8848 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8853 SET_DECL_RTL (slot
, target
);
8854 /* If we must have an addressable slot, then make sure that
8855 the RTL that we just stored in slot is OK. */
8856 if (TREE_ADDRESSABLE (slot
))
8857 put_var_into_stack (slot
, /*rescan=*/true);
8861 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8862 /* Mark it as expanded. */
8863 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8865 if (VOID_TYPE_P (TREE_TYPE (exp1
)))
8866 /* If the initializer is void, just expand it; it will initialize
8867 the object directly. */
8868 expand_expr (exp1
, const0_rtx
, VOIDmode
, 0);
8870 store_expr (exp1
, target
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8872 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8879 tree lhs
= TREE_OPERAND (exp
, 0);
8880 tree rhs
= TREE_OPERAND (exp
, 1);
8882 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8888 /* If lhs is complex, expand calls in rhs before computing it.
8889 That's so we don't compute a pointer and save it over a
8890 call. If lhs is simple, compute it first so we can give it
8891 as a target if the rhs is just a call. This avoids an
8892 extra temp and copy and that prevents a partial-subsumption
8893 which makes bad code. Actually we could treat
8894 component_ref's of vars like vars. */
8896 tree lhs
= TREE_OPERAND (exp
, 0);
8897 tree rhs
= TREE_OPERAND (exp
, 1);
8901 /* Check for |= or &= of a bitfield of size one into another bitfield
8902 of size 1. In this case, (unless we need the result of the
8903 assignment) we can do this more efficiently with a
8904 test followed by an assignment, if necessary.
8906 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8907 things change so we do, this code should be enhanced to
8910 && TREE_CODE (lhs
) == COMPONENT_REF
8911 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8912 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8913 && TREE_OPERAND (rhs
, 0) == lhs
8914 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8915 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8916 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8918 rtx label
= gen_label_rtx ();
8920 do_jump (TREE_OPERAND (rhs
, 1),
8921 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8922 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8923 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8924 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8926 : integer_zero_node
)),
8928 do_pending_stack_adjust ();
8933 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8939 if (!TREE_OPERAND (exp
, 0))
8940 expand_null_return ();
8942 expand_return (TREE_OPERAND (exp
, 0));
8945 case PREINCREMENT_EXPR
:
8946 case PREDECREMENT_EXPR
:
8947 return expand_increment (exp
, 0, ignore
);
8949 case POSTINCREMENT_EXPR
:
8950 case POSTDECREMENT_EXPR
:
8951 /* Faster to treat as pre-increment if result is not used. */
8952 return expand_increment (exp
, ! ignore
, ignore
);
8955 if (modifier
== EXPAND_STACK_PARM
)
8957 /* If we are taking the address of something erroneous, just
8959 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8961 /* If we are taking the address of a constant and are at the
8962 top level, we have to use output_constant_def since we can't
8963 call force_const_mem at top level. */
8965 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8966 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8968 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8971 /* We make sure to pass const0_rtx down if we came in with
8972 ignore set, to avoid doing the cleanups twice for something. */
8973 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8974 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8975 (modifier
== EXPAND_INITIALIZER
8976 ? modifier
: EXPAND_CONST_ADDRESS
));
8978 /* If we are going to ignore the result, OP0 will have been set
8979 to const0_rtx, so just return it. Don't get confused and
8980 think we are taking the address of the constant. */
8984 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8985 clever and returns a REG when given a MEM. */
8986 op0
= protect_from_queue (op0
, 1);
8988 /* We would like the object in memory. If it is a constant, we can
8989 have it be statically allocated into memory. For a non-constant,
8990 we need to allocate some memory and store the value into it. */
8992 if (CONSTANT_P (op0
))
8993 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8995 else if (REG_P (op0
) || GET_CODE (op0
) == SUBREG
8996 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8997 || GET_CODE (op0
) == PARALLEL
|| GET_CODE (op0
) == LO_SUM
)
8999 /* If the operand is a SAVE_EXPR, we can deal with this by
9000 forcing the SAVE_EXPR into memory. */
9001 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
9003 put_var_into_stack (TREE_OPERAND (exp
, 0),
9005 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
9009 /* If this object is in a register, it can't be BLKmode. */
9010 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9011 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
9013 if (GET_CODE (op0
) == PARALLEL
)
9014 /* Handle calls that pass values in multiple
9015 non-contiguous locations. The Irix 6 ABI has examples
9017 emit_group_store (memloc
, op0
, inner_type
,
9018 int_size_in_bytes (inner_type
));
9020 emit_move_insn (memloc
, op0
);
9026 if (GET_CODE (op0
) != MEM
)
9029 mark_temp_addr_taken (op0
);
9030 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
9032 op0
= XEXP (op0
, 0);
9033 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
9034 op0
= convert_memory_address (ptr_mode
, op0
);
9038 /* If OP0 is not aligned as least as much as the type requires, we
9039 need to make a temporary, copy OP0 to it, and take the address of
9040 the temporary. We want to use the alignment of the type, not of
9041 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9042 the test for BLKmode means that can't happen. The test for
9043 BLKmode is because we never make mis-aligned MEMs with
9046 We don't need to do this at all if the machine doesn't have
9047 strict alignment. */
9048 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
9049 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
9051 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
9053 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9056 if (TYPE_ALIGN_OK (inner_type
))
9059 if (TREE_ADDRESSABLE (inner_type
))
9061 /* We can't make a bitwise copy of this object, so fail. */
9062 error ("cannot take the address of an unaligned member");
9066 new = assign_stack_temp_for_type
9067 (TYPE_MODE (inner_type
),
9068 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
9069 : int_size_in_bytes (inner_type
),
9070 1, build_qualified_type (inner_type
,
9071 (TYPE_QUALS (inner_type
)
9072 | TYPE_QUAL_CONST
)));
9074 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
9075 (modifier
== EXPAND_STACK_PARM
9076 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
9081 op0
= force_operand (XEXP (op0
, 0), target
);
9086 && modifier
!= EXPAND_CONST_ADDRESS
9087 && modifier
!= EXPAND_INITIALIZER
9088 && modifier
!= EXPAND_SUM
)
9089 op0
= force_reg (Pmode
, op0
);
9092 && ! REG_USERVAR_P (op0
))
9093 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
9095 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
9096 op0
= convert_memory_address (ptr_mode
, op0
);
9100 case ENTRY_VALUE_EXPR
:
9103 /* COMPLEX type for Extended Pascal & Fortran */
9106 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9109 /* Get the rtx code of the operands. */
9110 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9111 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
9114 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
9118 /* Move the real (op0) and imaginary (op1) parts to their location. */
9119 emit_move_insn (gen_realpart (mode
, target
), op0
);
9120 emit_move_insn (gen_imagpart (mode
, target
), op1
);
9122 insns
= get_insns ();
9125 /* Complex construction should appear as a single unit. */
9126 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9127 each with a separate pseudo as destination.
9128 It's not correct for flow to treat them as a unit. */
9129 if (GET_CODE (target
) != CONCAT
)
9130 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
9138 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9139 return gen_realpart (mode
, op0
);
9142 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9143 return gen_imagpart (mode
, op0
);
9147 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9151 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9154 target
= gen_reg_rtx (mode
);
9158 /* Store the realpart and the negated imagpart to target. */
9159 emit_move_insn (gen_realpart (partmode
, target
),
9160 gen_realpart (partmode
, op0
));
9162 imag_t
= gen_imagpart (partmode
, target
);
9163 temp
= expand_unop (partmode
,
9164 ! unsignedp
&& flag_trapv
9165 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
9166 ? negv_optab
: neg_optab
,
9167 gen_imagpart (partmode
, op0
), imag_t
, 0);
9169 emit_move_insn (imag_t
, temp
);
9171 insns
= get_insns ();
9174 /* Conjugate should appear as a single unit
9175 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9176 each with a separate pseudo as destination.
9177 It's not correct for flow to treat them as a unit. */
9178 if (GET_CODE (target
) != CONCAT
)
9179 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
9187 expand_resx_expr (exp
);
9190 case TRY_CATCH_EXPR
:
9192 tree handler
= TREE_OPERAND (exp
, 1);
9194 expand_eh_region_start ();
9195 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9196 expand_eh_handler (handler
);
9202 expand_start_catch (CATCH_TYPES (exp
));
9203 expand_expr (CATCH_BODY (exp
), const0_rtx
, VOIDmode
, 0);
9204 expand_end_catch ();
9207 case EH_FILTER_EXPR
:
9208 /* Should have been handled in expand_eh_handler. */
9211 case TRY_FINALLY_EXPR
:
9213 tree try_block
= TREE_OPERAND (exp
, 0);
9214 tree finally_block
= TREE_OPERAND (exp
, 1);
9216 if ((!optimize
&& lang_protect_cleanup_actions
== NULL
)
9217 || unsafe_for_reeval (finally_block
) > 1)
9219 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9220 is not sufficient, so we cannot expand the block twice.
9221 So we play games with GOTO_SUBROUTINE_EXPR to let us
9222 expand the thing only once. */
9223 /* When not optimizing, we go ahead with this form since
9224 (1) user breakpoints operate more predictably without
9225 code duplication, and
9226 (2) we're not running any of the global optimizers
9227 that would explode in time/space with the highly
9228 connected CFG created by the indirect branching. */
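	    /* Sketch of the control flow built below, not original text:
	       the FINALLY block is emitted exactly once as a small
	       subroutine; each path that needs it loads RETURN_LINK with
	       its resume address and jumps to FINALLY_LABEL, the finally
	       code ends with an indirect jump back through RETURN_LINK,
	       and normal execution rejoins at DONE_LABEL.  */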
9230 rtx finally_label
= gen_label_rtx ();
9231 rtx done_label
= gen_label_rtx ();
9232 rtx return_link
= gen_reg_rtx (Pmode
);
9233 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
9234 (tree
) finally_label
, (tree
) return_link
);
9235 TREE_SIDE_EFFECTS (cleanup
) = 1;
9237 /* Start a new binding layer that will keep track of all cleanup
9238 actions to be performed. */
9239 expand_start_bindings (2);
9240 target_temp_slot_level
= temp_slot_level
;
9242 expand_decl_cleanup (NULL_TREE
, cleanup
);
9243 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9245 preserve_temp_slots (op0
);
9246 expand_end_bindings (NULL_TREE
, 0, 0);
9247 emit_jump (done_label
);
9248 emit_label (finally_label
);
9249 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
9250 emit_indirect_jump (return_link
);
9251 emit_label (done_label
);
9255 expand_start_bindings (2);
9256 target_temp_slot_level
= temp_slot_level
;
9258 expand_decl_cleanup (NULL_TREE
, finally_block
);
9259 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9261 preserve_temp_slots (op0
);
9262 expand_end_bindings (NULL_TREE
, 0, 0);
9268 case GOTO_SUBROUTINE_EXPR
:
9270 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9271 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9272 rtx return_address
= gen_label_rtx ();
9273 emit_move_insn (return_link
,
9274 gen_rtx_LABEL_REF (Pmode
, return_address
));
9276 emit_label (return_address
);
9281 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9284 return get_exception_pointer (cfun
);
9287 return get_exception_filter (cfun
);
9290 /* Function descriptors are not valid except for as
9291 initialization constants, and should not be expanded. */
9295 expand_start_case (0, SWITCH_COND (exp
), integer_type_node
,
9297 if (SWITCH_BODY (exp
))
9298 expand_expr_stmt (SWITCH_BODY (exp
));
9299 if (SWITCH_LABELS (exp
))
9302 tree vec
= SWITCH_LABELS (exp
);
9303 size_t i
, n
= TREE_VEC_LENGTH (vec
);
9305 for (i
= 0; i
< n
; ++i
)
9307 tree elt
= TREE_VEC_ELT (vec
, i
);
9308 tree controlling_expr_type
= TREE_TYPE (SWITCH_COND (exp
));
9309 tree min_value
= TYPE_MIN_VALUE (controlling_expr_type
);
9310 tree max_value
= TYPE_MAX_VALUE (controlling_expr_type
);
9312 tree case_low
= CASE_LOW (elt
);
9313 tree case_high
= CASE_HIGH (elt
) ? CASE_HIGH (elt
) : case_low
;
9314 if (case_low
&& case_high
)
9316 /* Case label is less than minimum for type. */
9317 if ((tree_int_cst_compare (case_low
, min_value
) < 0)
9318 && (tree_int_cst_compare (case_high
, min_value
) < 0))
9320 warning ("case label value %d is less than minimum value for type",
9321 TREE_INT_CST (case_low
));
9325 /* Case value is greater than maximum for type. */
9326 if ((tree_int_cst_compare (case_low
, max_value
) > 0)
9327 && (tree_int_cst_compare (case_high
, max_value
) > 0))
9329 warning ("case label value %d exceeds maximum value for type",
9330 TREE_INT_CST (case_high
));
9334 /* Saturate lower case label value to minimum. */
9335 if ((tree_int_cst_compare (case_high
, min_value
) >= 0)
9336 && (tree_int_cst_compare (case_low
, min_value
) < 0))
9338 warning ("lower value %d in case label range less than minimum value for type",
9339 TREE_INT_CST (case_low
));
9340 case_low
= min_value
;
9343 /* Saturate upper case label value to maximum. */
9344 if ((tree_int_cst_compare (case_low
, max_value
) <= 0)
9345 && (tree_int_cst_compare (case_high
, max_value
) > 0))
9347 warning ("upper value %d in case label range exceeds maximum value for type",
9348 TREE_INT_CST (case_high
));
9349 case_high
= max_value
;
9353 add_case_node (case_low
, case_high
, CASE_LABEL (elt
), &duplicate
, true);
9358 expand_end_case_type (SWITCH_COND (exp
), TREE_TYPE (exp
));
9362 expand_label (TREE_OPERAND (exp
, 0));
9365 case CASE_LABEL_EXPR
:
9368 add_case_node (CASE_LOW (exp
), CASE_HIGH (exp
), CASE_LABEL (exp
),
9376 expand_asm_expr (exp
);
9380 return lang_hooks
.expand_expr (exp
, original_target
, tmode
,
9384 /* Here to do an ordinary binary operator, generating an instruction
9385 from the optab already placed in `this_optab'. */
9387 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
9388 subtarget
, &op0
, &op1
, 0);
9390 if (modifier
== EXPAND_STACK_PARM
)
9392 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9393 unsignedp
, OPTAB_LIB_WIDEN
);
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

static tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
      return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
9490 /* Expand code for a post- or pre- increment or decrement
9491 and return the RTX for the result.
9492 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9495 expand_increment (tree exp
, int post
, int ignore
)
9499 tree incremented
= TREE_OPERAND (exp
, 0);
9500 optab this_optab
= add_optab
;
9502 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9503 int op0_is_copy
= 0;
9504 int single_insn
= 0;
9505 /* 1 means we can't store into OP0 directly,
9506 because it is a subreg narrower than a word,
9507 and we don't dare clobber the rest of the word. */
9510 /* Stabilize any component ref that might need to be
9511 evaluated more than once below. */
9513 || TREE_CODE (incremented
) == BIT_FIELD_REF
9514 || (TREE_CODE (incremented
) == COMPONENT_REF
9515 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9516 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9517 incremented
= stabilize_reference (incremented
);
9518 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9519 ones into save exprs so that they don't accidentally get evaluated
9520 more than once by the code below. */
9521 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9522 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9523 incremented
= save_expr (incremented
);
9525 /* Compute the operands as RTX.
9526 Note whether OP0 is the actual lvalue or a copy of it:
9527 I believe it is a copy iff it is a register or subreg
9528 and insns were generated in computing it. */
9530 temp
= get_last_insn ();
9531 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9533 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9534 in place but instead must do sign- or zero-extension during assignment,
9535 so we copy it into a new register and let the code below use it as
9538 Note that we can safely modify this SUBREG since it is know not to be
9539 shared (it was made by the expand_expr call above). */
9541 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9544 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9548 else if (GET_CODE (op0
) == SUBREG
9549 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9551 /* We cannot increment this SUBREG in place. If we are
9552 post-incrementing, get a copy of the old value. Otherwise,
9553 just mark that we cannot increment in place. */
9555 op0
= copy_to_reg (op0
);
9560 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| REG_P (op0
))
9561 && temp
!= get_last_insn ());
9562 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9564 /* Decide whether incrementing or decrementing. */
9565 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9566 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9567 this_optab
= sub_optab
;
9569 /* Convert decrement by a constant into a negative increment. */
9570 if (this_optab
== sub_optab
9571 && GET_CODE (op1
) == CONST_INT
)
9573 op1
= GEN_INT (-INTVAL (op1
));
9574 this_optab
= add_optab
;
9577 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9578 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9580 /* For a preincrement, see if we can do this with a single instruction. */
9583 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9584 if (icode
!= (int) CODE_FOR_nothing
9585 /* Make sure that OP0 is valid for operands 0 and 1
9586 of the insn we want to queue. */
9587 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9588 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9589 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9593 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9594 then we cannot just increment OP0. We must therefore contrive to
9595 increment the original value. Then, for postincrement, we can return
9596 OP0 since it is a copy of the old value. For preincrement, expand here
9597 unless we can do it with a single insn.
9599 Likewise if storing directly into OP0 would clobber high bits
9600 we need to preserve (bad_subreg). */
9601 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9603 /* This is the easiest way to increment the value wherever it is.
9604 Problems with multiple evaluation of INCREMENTED are prevented
9605 because either (1) it is a component_ref or preincrement,
9606 in which case it was stabilized above, or (2) it is an array_ref
9607 with constant index in an array in a register, which is
9608 safe to reevaluate. */
9609 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9610 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9611 ? MINUS_EXPR
: PLUS_EXPR
),
9614 TREE_OPERAND (exp
, 1));
9616 while (TREE_CODE (incremented
) == NOP_EXPR
9617 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9619 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9620 incremented
= TREE_OPERAND (incremented
, 0);
9623 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
);
9624 return post
? op0
: temp
;
9629 /* We have a true reference to the value in OP0.
9630 If there is an insn to add or subtract in this mode, queue it.
9631 Queuing the increment insn avoids the register shuffling
9632 that often results if we must increment now and first save
9633 the old value for subsequent use. */
9635 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9636 op0
= stabilize (op0
);
9639 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9640 if (icode
!= (int) CODE_FOR_nothing
9641 /* Make sure that OP0 is valid for operands 0 and 1
9642 of the insn we want to queue. */
9643 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9644 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9646 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9647 op1
= force_reg (mode
, op1
);
9649 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9651 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9653 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9654 ? force_reg (Pmode
, XEXP (op0
, 0))
9655 : copy_to_reg (XEXP (op0
, 0)));
9658 op0
= replace_equiv_address (op0
, addr
);
9659 temp
= force_reg (GET_MODE (op0
), op0
);
9660 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9661 op1
= force_reg (mode
, op1
);
9663 /* The increment queue is LIFO, thus we have to `queue'
9664 the instructions in reverse order. */
9665 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9666 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9671 /* Preincrement, or we can't increment with one simple insn. */
9673 /* Save a copy of the value before inc or dec, to return it later. */
9674 temp
= value
= copy_to_reg (op0
);
9676 /* Arrange to return the incremented value. */
9677 /* Copy the rtx because expand_binop will protect from the queue,
9678 and the results of that would be invalid for us to return
9679 if our caller does emit_queue before using our result. */
9680 temp
= copy_rtx (value
= op0
);
9682 /* Increment however we can. */
9683 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9684 TYPE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9686 /* Make sure the value is stored into OP0. */
9688 emit_move_insn (op0
, op1
);
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
9727 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9728 result at the end. We can't simply invert the test since it would
9729 have already been inverted if it were valid. This case occurs for
9730 some floating-point comparisons. */
9732 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
9733 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
9735 arg0
= TREE_OPERAND (exp
, 0);
9736 arg1
= TREE_OPERAND (exp
, 1);
9738 /* Don't crash if the comparison was erroneous. */
9739 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
9742 type
= TREE_TYPE (arg0
);
9743 operand_mode
= TYPE_MODE (type
);
9744 unsignedp
= TYPE_UNSIGNED (type
);
9746 /* We won't bother with BLKmode store-flag operations because it would mean
9747 passing a lot of information to emit_store_flag. */
9748 if (operand_mode
== BLKmode
)
9751 /* We won't bother with store-flag operations involving function pointers
9752 when function pointers must be canonicalized before comparisons. */
9753 #ifdef HAVE_canonicalize_funcptr_for_compare
9754 if (HAVE_canonicalize_funcptr_for_compare
9755 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9756 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9758 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9759 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9760 == FUNCTION_TYPE
))))
9767 /* Get the rtx comparison code to use. We know that EXP is a comparison
9768 operation of some type. Some comparisons against 1 and -1 can be
9769 converted to comparisons with zero. Do so here so that the tests
9770 below will be aware that we have a comparison with zero. These
9771 tests will not catch constants in the first operand, but constants
9772 are rarely passed as the first operand. */
9774 switch (TREE_CODE (exp
))
9783 if (integer_onep (arg1
))
9784 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
9786 code
= unsignedp
? LTU
: LT
;
9789 if (! unsignedp
&& integer_all_onesp (arg1
))
9790 arg1
= integer_zero_node
, code
= LT
;
9792 code
= unsignedp
? LEU
: LE
;
9795 if (! unsignedp
&& integer_all_onesp (arg1
))
9796 arg1
= integer_zero_node
, code
= GE
;
9798 code
= unsignedp
? GTU
: GT
;
9801 if (integer_onep (arg1
))
9802 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
9804 code
= unsignedp
? GEU
: GE
;
9807 case UNORDERED_EXPR
:
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
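  /* For example, "(x & 4) != 0" is expanded as "(x >> 2) & 1", and the
     EQ form "(x & 4) == 0" additionally XORs that result with 1.  */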
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);
  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);
  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);
  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
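/* Roughly, for "switch (i)" with case values in [lo, hi], the casesi
   pattern receives the index, the minimum value lo and the range
   hi - lo; it performs the bounds check itself and dispatches through
   TABLE_LABEL, branching to DEFAULT_LABEL when the index is out of
   range.  */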
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (lang_hooks.types.type_for_size
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
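  /* Illustration: because the lower bound has already been subtracted,
     the single unsigned comparison

         if ((unsigned) (i - lo) > (unsigned) (hi - lo)) goto default_label;

     rejects both i < lo and i > hi at once.  */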
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
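/* Example: if the target can move DImode values but has no V2DImode
   support, a V2DImode vector can still be handled as two DImode
   pieces, so V2DImode is reported as valid here.  */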
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
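/* For example, a V4SImode VECTOR_CST such as { 1, 2, 3, 4 } becomes
   (const_vector:V4SI [(const_int 1) (const_int 2) (const_int 3)
   (const_int 4)]); any trailing elements not supplied by the
   VECTOR_CST are filled in with zero.  */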
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
/* Called to move the SAVE_EXPRs for parameter declarations in a
   nested function into the nested function.  DATA is really the
   nested FUNCTION_DECL.  */
static tree
set_save_expr_context (tree *tp,
                       int *walk_subtrees,
                       void *data)
{
  if (TREE_CODE (*tp) == SAVE_EXPR && !SAVE_EXPR_CONTEXT (*tp))
    SAVE_EXPR_CONTEXT (*tp) = (tree) data;
  /* Do not walk back into the SAVE_EXPR_CONTEXT; that will cause
     circularity.  */
  else if (DECL_P (*tp))
    *walk_subtrees = 0;

  return NULL;
}
static void
execute_expand (void)
{
  /* If the function has a variably modified type, there may be
     SAVE_EXPRs in the parameter types.  Their context must be set to
     refer to this function; they cannot be expanded in the containing
     function.  */
  if (decl_function_context (current_function_decl) == current_function_decl
      && variably_modified_type_p (TREE_TYPE (current_function_decl)))
    walk_tree (&TREE_TYPE (current_function_decl), set_save_expr_context,
               current_function_decl, NULL);

  /* Expand the variables recorded during gimple lowering.  This must
     occur before the call to expand_function_start to ensure that
     all used variables are expanded before we expand anything on the
     PENDING_SIZES list.  */
  expand_used_vars ();

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Generate the RTL for this function.  */
  expand_expr_stmt_value (DECL_SAVED_TREE (current_function_decl), 0, 0);

  /* We hard-wired immediate_size_expand to zero above.
     expand_function_end will decrement this variable.  So, we set the
     variable to one here, so that after the decrement it will remain
     zero.  */
  immediate_size_expand = 1;

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus.file)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  record_block_change (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();
}
struct tree_opt_pass pass_expand =
{
  "expand",                             /* name */
  NULL,                                 /* gate */
  execute_expand,                       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh,                      /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_cfg | PROP_gimple_leh,           /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
};

#include "gt-expr.h"