/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						      unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
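
/* Illustration (not part of the original sources): a caller that wants to
   copy LEN constant bytes would typically apply these macros like

     if (MOVE_BY_PIECES_P (len, align))
       move_by_pieces (dst, src, len, align, 0);
     else
       ... fall back to a movstr pattern or a memcpy libcall ...

   which is the policy emit_block_move implements further down.  */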
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
    {
      rtx y = XEXP (x, 0);
      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

      if (QUEUED_INSN (y))
        {
          rtx temp = gen_reg_rtx (GET_MODE (x));

          emit_insn_before (gen_move_insn (temp, new),
                            QUEUED_INSN (y));
          return temp;
        }

      /* Copy the address into a pseudo, so that the returned value
         remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
    }

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
  if (code == MEM)
    {
      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
        {
          x = copy_rtx (x);
          XEXP (x, 0) = tem;
        }
    }
  else if (code == PLUS || code == MULT)
    {
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
        {
          x = copy_rtx (x);
          XEXP (x, 0) = new0;
          XEXP (x, 1) = new1;
        }
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
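
/* Minimal usage sketch (illustrative, not from the original file): an
   operand that may contain a QUEUED is filtered immediately before it is
   placed in an insn, and the queue is flushed afterwards:

     rtx op = protect_from_queue (x, 0);
     emit_insn (gen_move_insn (target, op));
     emit_queue ();
*/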
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
        {
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
          QUEUED_INSN (p) = body;
          break;

#ifdef ENABLE_CHECKING
        case SEQUENCE:
          abort ();
          break;
#endif

        default:
          QUEUED_INSN (p) = emit_insn (body);
          break;
        }

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
        tab = trunc_optab;
      else
        abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
#ifdef HAVE_slt
      else if (HAVE_slt
               && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
               && STORE_FLAG_VALUE == -1)
        {
          emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                         lowpart_mode, 0);
          fill_value = gen_reg_rtx (word_mode);
          emit_insn (gen_slt (fill_value));
        }
#endif
      else
        {
          fill_value
            = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                            size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                            NULL_RTX, 0);
          fill_value = convert_to_mode (word_mode, fill_value, 1);
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
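
/* Illustrative use of convert_move (hypothetical operands): to widen a
   QImode value into a fresh SImode register with zero extension, one
   would write

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, qi_value, 1);

   where the nonzero UNSIGNEDP argument selects zero extension.  */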
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
        abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
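
/* Worked example (illustrative): converting the QImode constant 0xff to
   SImode first masks the value to the old width (0xff), then sign extends
   from bit 7 when UNSIGNEDP is zero, yielding (const_int -1); with
   UNSIGNEDP nonzero the value is left as (const_int 255).  */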
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
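
/* For example (illustrative figures only): with an 8-byte MOVE_MAX_PIECES
   and a 64-bit HOST_WIDE_INT this evaluates to MIN (8, 16), i.e. 8, so
   store_by_pieces never needs an immediate wider than GCC can represent.  */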
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */

  if (endp)
    {
      rtx to1;

      if (data.autinc_to)
        {
          if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
            emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
          else
            data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                            -1));
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        to1 = adjust_address (data.to, QImode, data.offset);

      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
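
/* Worked example (illustrative): on a 32-bit target with aligned operands,
   l = 10 bytes costs 10/4 = 2 SImode moves with 2 bytes left over, then
   one HImode move, so move_by_pieces_ninsns returns 3.  */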
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
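
/* Typical call (illustrative): copying SIZE bytes between two BLKmode
   MEMs with no special constraints would be

     emit_block_move (dst_mem, src_mem, GEN_INT (size), BLOCK_OP_NORMAL);

   where dst_mem and src_mem are hypothetical MEM operands.  */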
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
#endif
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
1443 return true if successful. */
1446 emit_block_move_via_movstr (rtx x
, rtx y
, rtx size
, unsigned int align
)
1448 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1449 enum machine_mode mode
;
1451 /* Since this is a move insn, we don't care about volatility. */
1454 /* Try the most limited insn first, because there's no point
1455 including more than one in the machine description unless
1456 the more limited one has some advantage. */
1458 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1459 mode
= GET_MODE_WIDER_MODE (mode
))
1461 enum insn_code code
= movstr_optab
[(int) mode
];
1462 insn_operand_predicate_fn pred
;
1464 if (code
!= CODE_FOR_nothing
1465 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1466 here because if SIZE is less than the mode mask, as it is
1467 returned by the macro, it will definitely be less than the
1468 actual mode mask. */
1469 && ((GET_CODE (size
) == CONST_INT
1470 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1471 <= (GET_MODE_MASK (mode
) >> 1)))
1472 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1473 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1474 || (*pred
) (x
, BLKmode
))
1475 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1476 || (*pred
) (y
, BLKmode
))
1477 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1478 || (*pred
) (opalign
, VOIDmode
)))
1481 rtx last
= get_last_insn ();
1484 op2
= convert_to_mode (mode
, size
, 1);
1485 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1486 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1487 op2
= copy_to_mode_reg (mode
, op2
);
1489 /* ??? When called via emit_block_move_for_call, it'd be
1490 nice if there were some way to inform the backend, so
1491 that it doesn't fail the expansion because it thinks
1492 emitting the libcall would be more efficient. */
1494 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1502 delete_insns_since (last
);
1510 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1511 Return the return value from memcpy, 0 otherwise. */
1514 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
)
1516 rtx dst_addr
, src_addr
;
1517 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1518 enum machine_mode size_mode
;
1521 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1523 It is unsafe to save the value generated by protect_from_queue and reuse
1524 it later. Consider what happens if emit_queue is called before the
1525 return value from protect_from_queue is used.
1527 Expansion of the CALL_EXPR below will call emit_queue before we are
1528 finished emitting RTL for argument setup. So if we are not careful we
1529 could get the wrong value for an argument.
1531 To avoid this problem we go ahead and emit code to copy the addresses of
1532 DST and SRC and SIZE into new pseudos. We can then place those new
1533 pseudos into an RTL_EXPR and use them later, even after a call to
1536 Note this is not strictly needed for library calls since they do not call
1537 emit_queue before loading their arguments. However, we may need to have
1538 library calls call emit_queue in the future since failing to do so could
1539 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1540 arguments in registers. */
1542 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1543 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1545 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1546 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1548 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1549 src_tree
= make_tree (ptr_type_node
, src_addr
);
1551 if (TARGET_MEM_FUNCTIONS
)
1552 size_mode
= TYPE_MODE (sizetype
);
1554 size_mode
= TYPE_MODE (unsigned_type_node
);
1556 size
= convert_to_mode (size_mode
, size
, 1);
1557 size
= copy_to_mode_reg (size_mode
, size
);
1559 /* It is incorrect to use the libcall calling conventions to call
1560 memcpy in this context. This could be a user call to memcpy and
1561 the user may wish to examine the return value from memcpy. For
1562 targets where libcalls and normal calls have different conventions
1563 for returning pointers, we could end up generating incorrect code.
1565 For convenience, we generate the call to bcopy this way as well. */
1567 if (TARGET_MEM_FUNCTIONS
)
1568 size_tree
= make_tree (sizetype
, size
);
1570 size_tree
= make_tree (unsigned_type_node
, size
);
1572 fn
= emit_block_move_libcall_fn (true);
1573 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1574 if (TARGET_MEM_FUNCTIONS
)
1576 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1577 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1581 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1582 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1585 /* Now we have to build up the CALL_EXPR itself. */
1586 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1587 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1588 call_expr
, arg_list
, NULL_TREE
);
1590 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1592 /* If we are initializing a readonly value, show the above call clobbered
1593 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1594 the delay slot scheduler might overlook conflicts and take nasty
1596 if (RTX_UNCHANGING_P (dst
))
1597 add_function_usage_to
1598 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode
,
1599 gen_rtx_CLOBBER (VOIDmode
, dst
),
1602 return TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
;
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memcpy");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           const_ptr_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bcopy");
          args = build_function_type_list (void_type_node, const_ptr_type_node,
                                           ptr_type_node, unsigned_type_node,
                                           NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
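
/* The loop emitted above has this shape (illustrative pseudo-RTL):

       iter = 0;
       goto cmp;
     top:
       *(x + iter) = *(y + iter);      QImode copy
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;
*/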
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
1799 ORIG, where ORIG is a non-consecutive group of registers represented by
1800 a PARALLEL. The clone is identical to the original except in that the
1801 original set of registers is replaced by a new set of pseudo registers.
1802 The new set has the same modes as the original set. */
1805 gen_group_rtx (rtx orig
)
1810 if (GET_CODE (orig
) != PARALLEL
)
1813 length
= XVECLEN (orig
, 0);
1814 tmps
= alloca (sizeof (rtx
) * length
);
1816 /* Skip a NULL entry in first slot. */
1817 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1822 for (; i
< length
; i
++)
1824 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1825 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1827 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1830 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1833 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1834 where DST is non-consecutive registers represented by a PARALLEL.
1835 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1839 emit_group_load (rtx dst
, rtx orig_src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1844 if (GET_CODE (dst
) != PARALLEL
)
1847 /* Check for a NULL entry, used to indicate that the parameter goes
1848 both on the stack and in registers. */
1849 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1854 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1856 /* Process the pieces. */
1857 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1859 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1860 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1861 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1864 /* Handle trailing fragments that run over the size of the struct. */
1865 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1867 /* Arrange to shift the fragment to where it belongs.
1868 extract_bit_field loads to the lsb of the reg. */
1870 #ifdef BLOCK_REG_PADDING
1871 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1872 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1877 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1878 bytelen
= ssize
- bytepos
;
1883 /* If we won't be loading directly from memory, protect the real source
1884 from strange tricks we might play; but make sure that the source can
1885 be loaded directly into the destination. */
1887 if (GET_CODE (orig_src
) != MEM
1888 && (!CONSTANT_P (orig_src
)
1889 || (GET_MODE (orig_src
) != mode
1890 && GET_MODE (orig_src
) != VOIDmode
)))
1892 if (GET_MODE (orig_src
) == VOIDmode
)
1893 src
= gen_reg_rtx (mode
);
1895 src
= gen_reg_rtx (GET_MODE (orig_src
));
1897 emit_move_insn (src
, orig_src
);
1900 /* Optimize the access just a bit. */
1901 if (GET_CODE (src
) == MEM
1902 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1903 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1904 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1905 && bytelen
== GET_MODE_SIZE (mode
))
1907 tmps
[i
] = gen_reg_rtx (mode
);
1908 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1910 else if (GET_CODE (src
) == CONCAT
)
1912 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1913 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1915 if ((bytepos
== 0 && bytelen
== slen0
)
1916 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1918 /* The following assumes that the concatenated objects all
1919 have the same size. In this case, a simple calculation
1920 can be used to determine the object and the bit field
1922 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1923 if (! CONSTANT_P (tmps
[i
])
1924 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
1925 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1926 (bytepos
% slen0
) * BITS_PER_UNIT
,
1927 1, NULL_RTX
, mode
, mode
, ssize
);
1929 else if (bytepos
== 0)
1931 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1932 emit_move_insn (mem
, src
);
1933 tmps
[i
] = adjust_address (mem
, mode
, 0);
1938 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1939 SIMD register, which is currently broken. While we get GCC
1940 to emit proper RTL for these cases, let's dump to memory. */
1941 else if (VECTOR_MODE_P (GET_MODE (dst
))
1942 && GET_CODE (src
) == REG
)
1944 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1947 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1948 emit_move_insn (mem
, src
);
1949 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1951 else if (CONSTANT_P (src
)
1952 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
1955 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1956 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1960 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
1961 tmps
[i
], 0, OPTAB_WIDEN
);
1966 /* Copy the extracted pieces into the proper (probable) hard regs. */
1967 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1968 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else if (bytepos == 0 && XVECLEN (src, 0))
            {
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
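  /* For instance (illustrative only): with UNITS_PER_WORD == 4 on a
     big-endian target, a 6-byte structure occupies one full word plus two
     bytes of a second word.  Those two bytes sit at the most significant
     end of that word, so PADDING_CORRECTION becomes 32 - 2 * 8 == 16 bits
     to skip before the first useful bit.  */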
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
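/* The chain built up in *CALL_FUSAGE becomes the CALL_INSN's
   CALL_INSN_FUNCTION_USAGE and looks, for example, like

        (expr_list (use (reg:SI 5))
           (expr_list (use (reg:SI 4)) (nil)))

   telling later passes which hard registers carry arguments into the
   call.  */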
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
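/* A CONSTFUN callback only has to manufacture the constant RTX for the
   piece at OFFSET in mode MODE.  A minimal sketch (illustrative only; the
   name is hypothetical and this function is not part of this file) that
   yields the same small constant for every piece, memset-style:

        static rtx
        example_const_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                           enum machine_mode mode ATTRIBUTE_UNUSED)
        {
          return GEN_INT (*(unsigned char *) data);
        }

   clear_by_pieces_1 below is the real callback used when clearing.  */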
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      if (endp == 2)
        abort ();
      return to;
    }

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */
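  /* For example (illustrative only): with 4-byte words and
     STORE_MAX_PIECES == 4, a 14-byte block is emitted as three SImode
     stores followed by one HImode store; each pass of the loop below
     handles every piece of the current widest mode before MAX_SIZE
     shrinks past it.  */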
  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size)
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (size == const0_rtx)
        ;
      else if (GET_CODE (size) == CONST_INT
               && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
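/* clear_storage thus tries the strategies in cost order: inline stores for
   small constant sizes, a target clrstr pattern when one accepts the
   operands, and a memset/bzero library call as the fallback.  */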
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
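/* A clrstrM pattern in the machine description takes exactly the operands
   constructed above: operand 0 is the destination BLKmode MEM, operand 1
   the byte count in mode M, and operand 2 the known alignment as a
   CONST_INT number of bytes.  */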
/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memset");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           integer_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bzero");
          args = build_function_type_list (void_type_node, ptr_type_node,
                                           unsigned_type_node, NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_clear_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
    }
}
static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn, NULL);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
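/* The REG_EQUAL note added above matters mostly for constants that had to
   be spilled: e.g. when a DFmode constant is forced into the constant
   pool, the insn that loads it back from memory still records the original
   CONST_DOUBLE, so later passes such as CSE can treat the register as
   holding that known value.  */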
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = GET_MODE_INNER (mode))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
      if (stack
          && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
              != GET_MODE_SIZE (submode)))
        {
          rtx temp;
          HOST_WIDE_INT offset1, offset2;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
          offset1 = 0;
          offset2 = GET_MODE_SIZE (submode);
#else
          offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
          offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
                     + GET_MODE_SIZE (submode));
#endif

          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset1))),
                          gen_realpart (submode, y));
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset2))),
                          gen_imagpart (submode, y));
        }
      else
#endif
      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_imagpart (submode, y));
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_realpart (submode, y));
#else
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_realpart (submode, y));
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_imagpart (submode, y));
#endif
        }
      else
        {
          rtx realpart_x, realpart_y;
          rtx imagpart_x, imagpart_y;

          /* If this is a complex value with each part being smaller than a
             word, the usual calling sequence will likely pack the pieces into
             a single register.  Unfortunately, SUBREG of hard registers only
             deals in terms of words, so we have a problem converting input
             arguments to the CONCAT of two registers that is used elsewhere
             for complex values.  If this is before reload, we can copy it into
             memory and reload.  FIXME, we should see about using extract and
             insert on integer registers, but complex short and complex char
             variables should be rarely used.  */
          if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
              && (reload_in_progress | reload_completed) == 0)
            {
              int packed_dest_p
                = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
              int packed_src_p
                = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

              if (packed_dest_p || packed_src_p)
                {
                  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                               ? MODE_FLOAT : MODE_INT);

                  enum machine_mode reg_mode
                    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

                  if (reg_mode != BLKmode)
                    {
                      rtx mem = assign_stack_temp (reg_mode,
                                                   GET_MODE_SIZE (mode), 0);
                      rtx cmem = adjust_address (mem, mode, 0);

                      cfun->cannot_inline
                        = N_("function using short complex types cannot be inline");

                      if (packed_dest_p)
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

                          emit_move_insn_1 (cmem, y);
                          return emit_move_insn_1 (sreg, mem);
                        }
                      else
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

                          emit_move_insn_1 (mem, sreg);
                          return emit_move_insn_1 (x, cmem);
                        }
                    }
                }
            }

          realpart_x = gen_realpart (submode, x);
          realpart_y = gen_realpart (submode, y);
          imagpart_x = gen_imagpart (submode, x);
          imagpart_y = gen_imagpart (submode, y);

          /* Show the output dies here.  This is necessary for SUBREGs
             of pseudos since we cannot track their lifetimes correctly;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed)
              && (GET_CODE (realpart_x) == SUBREG
                  || GET_CODE (imagpart_x) == SUBREG))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

          emit_move_insn (realpart_x, realpart_y);
          emit_move_insn (imagpart_x, imagpart_y);
        }

      return get_last_insn ();
    }
  /* Handle MODE_CC modes:  If we don't have a special move insn for this mode,
     find a mode to do it in.  If we have a movcc, use it.  Otherwise,
     find the MODE_INT mode of the same width.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
           && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    {
      enum insn_code insn_code;
      enum machine_mode tmode = VOIDmode;
      rtx x1 = x, y1 = y;

      if (mode != CCmode
          && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
        tmode = CCmode;
      else
        for (tmode = QImode; tmode != VOIDmode;
             tmode = GET_MODE_WIDER_MODE (tmode))
          if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
            break;

      if (tmode == VOIDmode)
        abort ();

      /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
         may call change_address which is not appropriate if we were
         called when a reload was in progress.  We don't have to worry
         about changing the address since the size in bytes is supposed to
         be the same.  Copy the MEM to change the mode and move any
         substitutions from the old MEM to the new one.  */

      if (reload_in_progress)
        {
          x = gen_lowpart_common (tmode, x1);
          if (x == 0 && GET_CODE (x1) == MEM)
            {
              x = adjust_address_nv (x1, tmode, 0);
              copy_replacements (x1, x);
            }

          y = gen_lowpart_common (tmode, y1);
          if (y == 0 && GET_CODE (y1) == MEM)
            {
              y = adjust_address_nv (y1, tmode, 0);
              copy_replacements (y1, y);
            }
        }
      else
        {
          x = gen_lowpart (tmode, x);
          y = gen_lowpart (tmode, y);
        }

      insn_code = mov_optab->handlers[(int) tmode].insn_code;
      return emit_insn (GEN_FCN (insn_code) (x, y));
    }

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
           && (submode = int_mode_for_mode (mode)) != BLKmode
           && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
                      (simplify_gen_subreg (submode, x, mode, 0),
                       simplify_gen_subreg (submode, y, mode, 0)));
  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          rtx temp;
          enum rtx_code code;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

          code = GET_CODE (XEXP (x, 0));

          /* Just hope that small offsets off SP are OK.  */
          if (code == POST_INC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (-((HOST_WIDE_INT)
                                            GET_MODE_SIZE (GET_MODE (x)))));
          else if (code == POST_DEC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          else
            temp = stack_pointer_rtx;

          x = change_address (x, VOIDmode, temp);
        }
#endif

      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = get_insns ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
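/* For example, moving the DFmode constant 1.0 into a register can be
   emitted as an extendsfdf2 from the SFmode constant 1.0 when that value
   truncates exactly, which typically shrinks the constant pool entry and
   lets the load use the narrower mode.  */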
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
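/* For instance, on a stack that grows downward, push_block (GEN_INT (10),
   2, 0) allocates 12 bytes and returns the low end of the block; with
   BELOW nonzero the returned address would instead skip past the two
   padding bytes at the low end.  */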
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    GEN_INT (rounded_size),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
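/* As an illustration: if PUSH_ROUNDING rounds a one-byte QImode push up to
   four bytes and the argument pads downward, the code above first moves
   the stack pointer by the full four bytes and then stores the byte at an
   offset of three, so the value lands at the high end of its rounded
   slot.  */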
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take any all others actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, int extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument:  `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          if (type != 0)
            {
              set_mem_attributes (target, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (target, 0);
            }

          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);
          if (type != 0)
            {
              set_mem_attributes (dest, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (dest, 0);
            }

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.  */

rtx
expand_assignment (tree to, tree from, int want_value)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep);
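      /* For example, for an assignment such as `s.f = v' where F is a
         3-bit field starting 10 bits into S, get_inner_reference returns
         S as TEM with BITSIZE == 3, BITPOS == 10, a null OFFSET and the
         field's mode in MODE1; store_field below then uses those numbers
         to emit the bit-field store.  */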
      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

          if (GET_CODE (to_rtx) != MEM)
            abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (GET_CODE (to_rtx) == MEM
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_type (TREE_TYPE (to),
                                                                 offset));
        }

      if (GET_CODE (to_rtx) == MEM)
        {
          /* If the field is at offset zero, we could have been given the
             DECL_RTX of the parent struct.  Don't munge it.  */
          to_rtx = shallow_copy_rtx (to_rtx);

          set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
        }

      /* Deal with volatile and readonly fields.  The former is only done
         for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && GET_CODE (to_rtx) == MEM)
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_VOLATILE_P (to_rtx) = 1;
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1))
          /* We can't assert that a MEM won't be set more than once
             if the component is not addressable because another
             non-addressable component may be referenced by the same MEM.  */
          && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast for HPUX compiler.  */
                             ? ((enum machine_mode)
                                TYPE_MODE (TREE_TYPE (to)))
                             : VOIDmode),
                            unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
          if (POINTER_TYPE_P (TREE_TYPE (to)))
            value = convert_memory_address (GET_MODE (to_rtx), value);
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      if (TARGET_MEM_FUNCTIONS)
        emit_library_call (memmove_libfunc, LCT_NORMAL,
                           VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                           XEXP (from_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (sizetype),
                                            size, TREE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));
      else
        emit_library_call (bcopy_libfunc, LCT_NORMAL,
                           VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                           XEXP (to_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (integer_type_node),
                                            size,
                                            TREE_UNSIGNED (integer_type_node)),
                           TYPE_MODE (integer_type_node));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
3989 /* Generate code for computing expression EXP,
3990 and storing the value into TARGET.
3991 TARGET may contain a QUEUED rtx.
3993 If WANT_VALUE & 1 is nonzero, return a copy of the value
3994 not in TARGET, so that we can be sure to use the proper
3995 value in a containing expression even if TARGET has something
3996 else stored in it. If possible, we copy the value through a pseudo
3997 and return that pseudo. Or, if the value is constant, we try to
3998 return the constant. In some cases, we return a pseudo
3999 copied *from* TARGET.
4001 If the mode is BLKmode then we may return TARGET itself.
4002 It turns out that in BLKmode it doesn't cause a problem.
4003 because C has no operators that could combine two different
4004 assignments into the same BLKmode object with different values
4005 with no sequence point. Will other languages need this to
4008 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4009 to catch quickly any cases where the caller uses the value
4010 and fails to set WANT_VALUE.
4012 If WANT_VALUE & 2 is set, this is a store into a call param on the
4013 stack, and block moves may need to be treated specially. */
4016 store_expr (tree exp
, rtx target
, int want_value
)
4019 int dont_return_target
= 0;
4020 int dont_store_target
= 0;
4022 if (VOID_TYPE_P (TREE_TYPE (exp
)))
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other.  Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
4029 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
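      /* Illustrative example (added; not part of the original source): the
         case above is reached for C++ input such as

             int i = p ? *p : throw Overflow ();

         where the throw arm has void type, so there is no result to store
         and the expression is expanded only for its side effects.  */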
4032 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4034 /* Perform first part of compound expression, then assign from second
4036 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4037 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4039 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4041 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */
4048 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4051 target
= protect_from_queue (target
, 1);
4053 do_pending_stack_adjust ();
4055 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4056 start_cleanup_deferral ();
4057 store_expr (TREE_OPERAND (exp
, 1), target
, want_value
& 2);
4058 end_cleanup_deferral ();
4060 emit_jump_insn (gen_jump (lab2
));
4063 start_cleanup_deferral ();
4064 store_expr (TREE_OPERAND (exp
, 2), target
, want_value
& 2);
4065 end_cleanup_deferral ();
4070 return want_value
& 1 ? target
: NULL_RTX
;
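  /* Illustrative example (added; not part of the original source): for an
     assignment like

         struct big d = flag ? x : y;

     where the struct has BLKmode, the branch above emits

         if (!flag) goto L1;   d = x;   goto L2;
         L1:  d = y;
         L2:  ...

     rather than materializing the selected operand in a temporary and then
     block-copying it into D.  */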
4072 else if (queued_subexp_p (target
))
4073 /* If target contains a postincrement, let's not risk
4074 using it as the place to generate the rhs. */
4076 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4078 /* Expand EXP into a new pseudo. */
4079 temp
= gen_reg_rtx (GET_MODE (target
));
4080 temp
= expand_expr (exp
, temp
, GET_MODE (target
),
4082 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4085 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
),
4087 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
4092 if (! MEM_VOLATILE_P (target
) && (want_value
& 1) != 0)
4093 dont_return_target
= 1;
4095 else if ((want_value
& 1) != 0
4096 && GET_CODE (target
) == MEM
4097 && ! MEM_VOLATILE_P (target
)
4098 && GET_MODE (target
) != BLKmode
)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
4106 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4107 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4108 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4110 /* If TEMP is already in the desired TARGET, only copy it from
4111 memory and don't store it there again. */
4113 || (rtx_equal_p (temp
, target
)
4114 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4115 dont_store_target
= 1;
4116 temp
= copy_to_reg (temp
);
4118 dont_return_target
= 1;
4120 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4121 /* If this is a scalar in a register that is stored in a wider mode
4122 than the declared mode, compute the result into its declared mode
4123 and then convert to the wider mode. Our value is the computed
4126 rtx inner_target
= 0;
4128 /* If we don't want a value, we can do the conversion inside EXP,
4129 which will often result in some optimizations. Do the conversion
4130 in two steps: first change the signedness, if needed, then
4131 the extend. But don't do this if the type of EXP is a subtype
4132 of something else since then the conversion might involve
4133 more than just converting modes. */
4134 if ((want_value
& 1) == 0
4135 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4136 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4138 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4139 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4141 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4142 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4144 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4145 (GET_MODE (SUBREG_REG (target
)),
4146 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4149 inner_target
= SUBREG_REG (target
);
4152 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
4153 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4155 /* If TEMP is a MEM and we want a result value, make the access
4156 now so it gets done only once. Strictly speaking, this is
4157 only necessary if the MEM is volatile, or if the address
4158 overlaps TARGET. But not performing the load twice also
4159 reduces the amount of rtl we generate and then have to CSE. */
4160 if (GET_CODE (temp
) == MEM
&& (want_value
& 1) != 0)
4161 temp
= copy_to_reg (temp
);
4163 /* If TEMP is a VOIDmode constant, use convert_modes to make
4164 sure that we properly convert it. */
4165 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4167 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4168 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4169 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4170 GET_MODE (target
), temp
,
4171 SUBREG_PROMOTED_UNSIGNED_P (target
));
4174 convert_move (SUBREG_REG (target
), temp
,
4175 SUBREG_PROMOTED_UNSIGNED_P (target
));
4177 /* If we promoted a constant, change the mode back down to match
4178 target. Otherwise, the caller might get confused by a result whose
4179 mode is larger than expected. */
4181 if ((want_value
& 1) != 0 && GET_MODE (temp
) != GET_MODE (target
))
4183 if (GET_MODE (temp
) != VOIDmode
)
4185 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4186 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4187 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4188 SUBREG_PROMOTED_UNSIGNED_P (target
));
4191 temp
= convert_modes (GET_MODE (target
),
4192 GET_MODE (SUBREG_REG (target
)),
4193 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4196 return want_value
& 1 ? temp
: NULL_RTX
;
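  /* Illustrative note (added; not part of the original source): the
     SUBREG_PROMOTED_VAR_P path above covers targets whose PROMOTE_MODE
     widens small scalars.  A `short' variable living in an SImode pseudo
     is assigned by computing the right-hand side in its declared HImode
     and then extending it into SUBREG_REG (target) with convert_move,
     using the signedness recorded by SUBREG_PROMOTED_UNSIGNED_P.  */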
4200 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4201 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4202 /* Return TARGET if it's a specified hardware register.
4203 If TARGET is a volatile mem ref, either return TARGET
4204 or return a reg copied *from* TARGET; ANSI requires this.
4206 Otherwise, if TEMP is not TARGET, return TEMP
4207 if it is constant (for efficiency),
4208 or if we really want the correct value. */
4209 if (!(target
&& GET_CODE (target
) == REG
4210 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4211 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4212 && ! rtx_equal_p (temp
, target
)
4213 && (CONSTANT_P (temp
) || (want_value
& 1) != 0))
4214 dont_return_target
= 1;
4217 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4218 the same as that of TARGET, adjust the constant. This is needed, for
4219 example, in case it is a CONST_DOUBLE and we want only a word-sized
4221 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4222 && TREE_CODE (exp
) != ERROR_MARK
4223 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4224 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4225 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */
4241 if ((! rtx_equal_p (temp
, target
)
4242 || (temp
!= target
&& (side_effects_p (temp
)
4243 || side_effects_p (target
))))
4244 && TREE_CODE (exp
) != ERROR_MARK
4245 && ! dont_store_target
4246 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4247 but TARGET is not valid memory reference, TEMP will differ
4248 from TARGET although it is really the same location. */
4249 && (TREE_CODE_CLASS (TREE_CODE (exp
)) != 'd'
4250 || target
!= DECL_RTL_IF_SET (exp
))
4251 /* If there's nothing to copy, don't bother. Don't call expr_size
4252 unless necessary, because some front-ends (C++) expr_size-hook
4253 aborts on objects that are not supposed to be bit-copied or
4255 && expr_size (exp
) != const0_rtx
)
4257 target
= protect_from_queue (target
, 1);
4258 if (GET_MODE (temp
) != GET_MODE (target
)
4259 && GET_MODE (temp
) != VOIDmode
)
4261 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4262 if (dont_return_target
)
4264 /* In this case, we will return TEMP,
4265 so make sure it has the proper mode.
4266 But don't forget to store the value into TARGET. */
4267 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4268 emit_move_insn (target
, temp
);
4271 convert_move (target
, temp
, unsignedp
);
4274 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4276 /* Handle copying a string constant into an array. The string
4277 constant may be shorter than the array. So copy just the string's
4278 actual length, and clear the rest. First get the size of the data
4279 type of the string, which is actually the size of the target. */
4280 rtx size
= expr_size (exp
);
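	/* Worked example (added; not part of the original source): for

	       char buf[8] = "abc";

	   the string constant occupies TREE_STRING_LENGTH == 4 bytes
	   (including the terminating NUL) while the target is 8 bytes, so
	   only 4 bytes are block-copied and the remaining 4 are cleared by
	   the code below.  */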
4282 if (GET_CODE (size
) == CONST_INT
4283 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4284 emit_block_move (target
, temp
, size
,
4286 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4289 /* Compute the size of the data to copy from the string. */
4291 = size_binop (MIN_EXPR
,
4292 make_tree (sizetype
, size
),
4293 size_int (TREE_STRING_LENGTH (exp
)));
4295 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4297 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4300 /* Copy that much. */
4301 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
,
4302 TREE_UNSIGNED (sizetype
));
4303 emit_block_move (target
, temp
, copy_size_rtx
,
4305 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4307 /* Figure out how much is left in TARGET that we have to clear.
4308 Do all calculations in ptr_mode. */
4309 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4311 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4312 target
= adjust_address (target
, BLKmode
,
4313 INTVAL (copy_size_rtx
));
4317 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4318 copy_size_rtx
, NULL_RTX
, 0,
4321 #ifdef POINTERS_EXTEND_UNSIGNED
4322 if (GET_MODE (copy_size_rtx
) != Pmode
)
4323 copy_size_rtx
= convert_to_mode (Pmode
, copy_size_rtx
,
4324 TREE_UNSIGNED (sizetype
));
4327 target
= offset_address (target
, copy_size_rtx
,
4328 highest_pow2_factor (copy_size
));
4329 label
= gen_label_rtx ();
4330 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4331 GET_MODE (size
), 0, label
);
4334 if (size
!= const0_rtx
)
4335 clear_storage (target
, size
);
4341 /* Handle calls that return values in multiple non-contiguous locations.
4342 The Irix 6 ABI has examples of this. */
4343 else if (GET_CODE (target
) == PARALLEL
)
4344 emit_group_load (target
, temp
, TREE_TYPE (exp
),
4345 int_size_in_bytes (TREE_TYPE (exp
)));
4346 else if (GET_MODE (temp
) == BLKmode
)
4347 emit_block_move (target
, temp
, expr_size (exp
),
4349 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4351 emit_move_insn (target
, temp
);
4354 /* If we don't want a value, return NULL_RTX. */
4355 if ((want_value
& 1) == 0)
4358 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4359 ??? The latter test doesn't seem to make sense. */
4360 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4363 /* Return TARGET itself if it is a hard register. */
4364 else if ((want_value
& 1) != 0
4365 && GET_MODE (target
) != BLKmode
4366 && ! (GET_CODE (target
) == REG
4367 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4368 return copy_to_reg (target
);
/* Return 1 if EXP just contains zeros.  FIXME merge with initializer_zerop.  */

static int
is_zeros_p (tree exp)
{
4381 switch (TREE_CODE (exp
))
4385 case NON_LVALUE_EXPR
:
4386 case VIEW_CONVERT_EXPR
:
4387 return is_zeros_p (TREE_OPERAND (exp
, 0));
4390 return integer_zerop (exp
);
4394 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4397 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
4400 for (elt
= TREE_VECTOR_CST_ELTS (exp
); elt
;
4401 elt
= TREE_CHAIN (elt
))
4402 if (!is_zeros_p (TREE_VALUE (elt
)))
4408 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4409 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4410 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4411 if (! is_zeros_p (TREE_VALUE (elt
)))
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
4426 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4428 int elts
= 0, zeros
= 0;
4429 tree elt
= CONSTRUCTOR_ELTS (exp
);
4430 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4432 /* If there are no ranges of true bits, it is all zero. */
4433 return elt
== NULL_TREE
;
4435 for (; elt
; elt
= TREE_CHAIN (elt
))
4437 /* We do not handle the case where the index is a RANGE_EXPR,
4438 so the statistic will be somewhat inaccurate.
4439 We do make a more accurate count in store_constructor itself,
4440 so since this function is only used for nested array elements,
4441 this should be close enough. */
4442 if (mostly_zeros_p (TREE_VALUE (elt
)))
4447 return 4 * zeros
>= 3 * elts
;
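  /* Worked example (added; not part of the original source): with elts == 8
     elements of which zeros == 6 are themselves mostly zero, the test reads
     4 * 6 >= 3 * 8, i.e. 24 >= 24, so the constructor is considered mostly
     zero and callers will prefer to clear the whole object up front.  */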
4450 return is_zeros_p (exp
);
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
                         tree exp, tree type, int cleared, int alias_set)
{
4469 if (TREE_CODE (exp
) == CONSTRUCTOR
4470 && bitpos
% BITS_PER_UNIT
== 0
4471 /* If we have a nonzero bitpos for a register target, then we just
4472 let store_field do the bitfield handling. This is unlikely to
4473 generate unnecessary clear instructions anyways. */
4474 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4476 if (GET_CODE (target
) == MEM
)
4478 = adjust_address (target
,
4479 GET_MODE (target
) == BLKmode
4481 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4482 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4485 /* Update the alias set, if required. */
4486 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4487 && MEM_ALIAS_SET (target
) != 0)
4489 target
= copy_rtx (target
);
4490 set_mem_alias_set (target
, alias_set
);
4493 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
4496 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
4511 tree type
= TREE_TYPE (exp
);
4512 #ifdef WORD_REGISTER_OPERATIONS
4513 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4516 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4517 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4521 /* If size is zero or the target is already cleared, do nothing. */
4522 if (size
== 0 || cleared
)
4524 /* We either clear the aggregate or indicate the value is dead. */
4525 else if ((TREE_CODE (type
) == UNION_TYPE
4526 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4527 && ! CONSTRUCTOR_ELTS (exp
))
4528 /* If the constructor is empty, clear the union. */
4530 clear_storage (target
, expr_size (exp
));
4534 /* If we are building a static constructor into a register,
4535 set the initial value as zero so we can fold the value into
4536 a constant. But if more than one register is involved,
4537 this probably loses. */
4538 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4539 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4541 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4545 /* If the constructor has fewer fields than the structure
4546 or if we are initializing the structure to mostly zeros,
4547 clear the whole structure first. Don't do this if TARGET is a
4548 register whose mode size isn't equal to SIZE since clear_storage
4549 can't handle this case. */
4550 else if (((list_length (CONSTRUCTOR_ELTS (exp
)) != fields_length (type
))
4551 || mostly_zeros_p (exp
))
4552 && (GET_CODE (target
) != REG
4553 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4556 rtx xtarget
= target
;
4558 if (readonly_fields_p (type
))
4560 xtarget
= copy_rtx (xtarget
);
4561 RTX_UNCHANGING_P (xtarget
) = 1;
4564 clear_storage (xtarget
, GEN_INT (size
));
4569 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
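      /* Illustrative example (added; not part of the original source): the
         clearing logic above means that for

             struct { int a, b, c, d; } s = { 1 };

         the whole of S is cleared first (the constructor names fewer fields
         than the record has), and only the store of 1 into S.A is emitted
         field by field afterwards.  */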
4571 /* Store each element of the constructor into
4572 the corresponding field of TARGET. */
4574 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4576 tree field
= TREE_PURPOSE (elt
);
4577 tree value
= TREE_VALUE (elt
);
4578 enum machine_mode mode
;
4579 HOST_WIDE_INT bitsize
;
4580 HOST_WIDE_INT bitpos
= 0;
4582 rtx to_rtx
= target
;
4584 /* Just ignore missing fields.
4585 We cleared the whole structure, above,
4586 if any fields are missing. */
4590 if (cleared
&& is_zeros_p (value
))
4593 if (host_integerp (DECL_SIZE (field
), 1))
4594 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4598 mode
= DECL_MODE (field
);
4599 if (DECL_BIT_FIELD (field
))
4602 offset
= DECL_FIELD_OFFSET (field
);
4603 if (host_integerp (offset
, 0)
4604 && host_integerp (bit_position (field
), 0))
4606 bitpos
= int_bit_position (field
);
4610 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4616 if (CONTAINS_PLACEHOLDER_P (offset
))
4617 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4618 offset
, make_tree (TREE_TYPE (exp
), target
));
4620 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4621 if (GET_CODE (to_rtx
) != MEM
)
4624 #ifdef POINTERS_EXTEND_UNSIGNED
4625 if (GET_MODE (offset_rtx
) != Pmode
)
4626 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
4628 if (GET_MODE (offset_rtx
) != ptr_mode
)
4629 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4632 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4633 highest_pow2_factor (offset
));
4636 if (TREE_READONLY (field
))
4638 if (GET_CODE (to_rtx
) == MEM
)
4639 to_rtx
= copy_rtx (to_rtx
);
4641 RTX_UNCHANGING_P (to_rtx
) = 1;
4644 #ifdef WORD_REGISTER_OPERATIONS
4645 /* If this initializes a field that is smaller than a word, at the
4646 start of a word, try to widen it to a full word.
4647 This special case allows us to output C++ member function
4648 initializations in a form that the optimizers can understand. */
4649 if (GET_CODE (target
) == REG
4650 && bitsize
< BITS_PER_WORD
4651 && bitpos
% BITS_PER_WORD
== 0
4652 && GET_MODE_CLASS (mode
) == MODE_INT
4653 && TREE_CODE (value
) == INTEGER_CST
4655 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4657 tree type
= TREE_TYPE (value
);
4659 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4661 type
= (*lang_hooks
.types
.type_for_size
)
4662 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4663 value
= convert (type
, value
);
4666 if (BYTES_BIG_ENDIAN
)
4668 = fold (build (LSHIFT_EXPR
, type
, value
,
4669 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4670 bitsize
= BITS_PER_WORD
;
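	  /* Illustrative note (added; not part of the original source): on a
	     32-bit WORD_REGISTER_OPERATIONS target, initializing an 8-bit
	     field at bit position 0 of a register-held structure with the
	     constant 5 is widened above into a full SImode word store -- the
	     stored value becomes 5, or 5 << 24 on a big-endian target -- so
	     later optimizers see a simple word assignment instead of a
	     bit-field insertion.  */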
4675 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4676 && DECL_NONADDRESSABLE_P (field
))
4678 to_rtx
= copy_rtx (to_rtx
);
4679 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4682 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4683 value
, type
, cleared
,
4684 get_alias_set (TREE_TYPE (field
)));
4687 else if (TREE_CODE (type
) == ARRAY_TYPE
4688 || TREE_CODE (type
) == VECTOR_TYPE
)
4693 tree domain
= TYPE_DOMAIN (type
);
4694 tree elttype
= TREE_TYPE (type
);
4696 HOST_WIDE_INT minelt
= 0;
4697 HOST_WIDE_INT maxelt
= 0;
4699 /* Vectors are like arrays, but the domain is stored via an array
4701 if (TREE_CODE (type
) == VECTOR_TYPE
)
4703 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4704 the same field as TYPE_DOMAIN, we are not guaranteed that
4706 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
4707 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4710 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4711 && TYPE_MAX_VALUE (domain
)
4712 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4713 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4715 /* If we have constant bounds for the range of the type, get them. */
4718 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4719 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4722 /* If the constructor has fewer elements than the array,
4723 clear the whole array first. Similarly if this is
4724 static constructor of a non-BLKmode object. */
4725 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4729 HOST_WIDE_INT count
= 0, zero_count
= 0;
4730 need_to_clear
= ! const_bounds_p
;
4732 /* This loop is a more accurate version of the loop in
4733 mostly_zeros_p (it handles RANGE_EXPR in an index).
4734 It is also needed to check for missing elements. */
4735 for (elt
= CONSTRUCTOR_ELTS (exp
);
4736 elt
!= NULL_TREE
&& ! need_to_clear
;
4737 elt
= TREE_CHAIN (elt
))
4739 tree index
= TREE_PURPOSE (elt
);
4740 HOST_WIDE_INT this_node_count
;
4742 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4744 tree lo_index
= TREE_OPERAND (index
, 0);
4745 tree hi_index
= TREE_OPERAND (index
, 1);
4747 if (! host_integerp (lo_index
, 1)
4748 || ! host_integerp (hi_index
, 1))
4754 this_node_count
= (tree_low_cst (hi_index
, 1)
4755 - tree_low_cst (lo_index
, 1) + 1);
4758 this_node_count
= 1;
4760 count
+= this_node_count
;
4761 if (mostly_zeros_p (TREE_VALUE (elt
)))
4762 zero_count
+= this_node_count
;
4765 /* Clear the entire array first if there are any missing elements,
4766 or if the incidence of zero elements is >= 75%. */
4768 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4772 if (need_to_clear
&& size
> 0)
4777 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4779 clear_storage (target
, GEN_INT (size
));
4783 else if (REG_P (target
))
4784 /* Inform later passes that the old value is dead. */
4785 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4787 /* Store each element of the constructor into
4788 the corresponding element of TARGET, determined
4789 by counting the elements. */
4790 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4792 elt
= TREE_CHAIN (elt
), i
++)
4794 enum machine_mode mode
;
4795 HOST_WIDE_INT bitsize
;
4796 HOST_WIDE_INT bitpos
;
4798 tree value
= TREE_VALUE (elt
);
4799 tree index
= TREE_PURPOSE (elt
);
4800 rtx xtarget
= target
;
4802 if (cleared
&& is_zeros_p (value
))
4805 unsignedp
= TREE_UNSIGNED (elttype
);
4806 mode
= TYPE_MODE (elttype
);
4807 if (mode
== BLKmode
)
4808 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4809 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4812 bitsize
= GET_MODE_BITSIZE (mode
);
4814 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4816 tree lo_index
= TREE_OPERAND (index
, 0);
4817 tree hi_index
= TREE_OPERAND (index
, 1);
4818 rtx index_r
, pos_rtx
, loop_end
;
4819 struct nesting
*loop
;
4820 HOST_WIDE_INT lo
, hi
, count
;
4823 /* If the range is constant and "small", unroll the loop. */
4825 && host_integerp (lo_index
, 0)
4826 && host_integerp (hi_index
, 0)
4827 && (lo
= tree_low_cst (lo_index
, 0),
4828 hi
= tree_low_cst (hi_index
, 0),
4829 count
= hi
- lo
+ 1,
4830 (GET_CODE (target
) != MEM
4832 || (host_integerp (TYPE_SIZE (elttype
), 1)
4833 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4836 lo
-= minelt
; hi
-= minelt
;
4837 for (; lo
<= hi
; lo
++)
4839 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4841 if (GET_CODE (target
) == MEM
4842 && !MEM_KEEP_ALIAS_SET_P (target
)
4843 && TREE_CODE (type
) == ARRAY_TYPE
4844 && TYPE_NONALIASED_COMPONENT (type
))
4846 target
= copy_rtx (target
);
4847 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4850 store_constructor_field
4851 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4852 get_alias_set (elttype
));
4857 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4858 loop_end
= gen_label_rtx ();
4860 unsignedp
= TREE_UNSIGNED (domain
);
4862 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4865 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4867 SET_DECL_RTL (index
, index_r
);
4868 if (TREE_CODE (value
) == SAVE_EXPR
4869 && SAVE_EXPR_RTL (value
) == 0)
4871 /* Make sure value gets expanded once before the
4873 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4876 store_expr (lo_index
, index_r
, 0);
4877 loop
= expand_start_loop (0);
4879 /* Assign value to element index. */
4881 = convert (ssizetype
,
4882 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4883 index
, TYPE_MIN_VALUE (domain
))));
4884 position
= size_binop (MULT_EXPR
, position
,
4886 TYPE_SIZE_UNIT (elttype
)));
4888 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4889 xtarget
= offset_address (target
, pos_rtx
,
4890 highest_pow2_factor (position
));
4891 xtarget
= adjust_address (xtarget
, mode
, 0);
4892 if (TREE_CODE (value
) == CONSTRUCTOR
)
4893 store_constructor (value
, xtarget
, cleared
,
4894 bitsize
/ BITS_PER_UNIT
);
4896 store_expr (value
, xtarget
, 0);
4898 expand_exit_loop_if_false (loop
,
4899 build (LT_EXPR
, integer_type_node
,
4902 expand_increment (build (PREINCREMENT_EXPR
,
4904 index
, integer_one_node
), 0, 0);
4906 emit_label (loop_end
);
4909 else if ((index
!= 0 && ! host_integerp (index
, 0))
4910 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4915 index
= ssize_int (1);
4918 index
= convert (ssizetype
,
4919 fold (build (MINUS_EXPR
, index
,
4920 TYPE_MIN_VALUE (domain
))));
4922 position
= size_binop (MULT_EXPR
, index
,
4924 TYPE_SIZE_UNIT (elttype
)));
4925 xtarget
= offset_address (target
,
4926 expand_expr (position
, 0, VOIDmode
, 0),
4927 highest_pow2_factor (position
));
4928 xtarget
= adjust_address (xtarget
, mode
, 0);
4929 store_expr (value
, xtarget
, 0);
4934 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4935 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4937 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4939 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
4940 && TREE_CODE (type
) == ARRAY_TYPE
4941 && TYPE_NONALIASED_COMPONENT (type
))
4943 target
= copy_rtx (target
);
4944 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4947 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4948 type
, cleared
, get_alias_set (elttype
));
4954 /* Set constructor assignments. */
4955 else if (TREE_CODE (type
) == SET_TYPE
)
4957 tree elt
= CONSTRUCTOR_ELTS (exp
);
4958 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4959 tree domain
= TYPE_DOMAIN (type
);
4960 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the whole set (using bzero/memset), and
         then set the bits we want.  */
4972 /* Check for all zeros. */
4973 if (elt
== NULL_TREE
&& size
> 0)
4976 clear_storage (target
, GEN_INT (size
));
4980 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4981 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4982 bitlength
= size_binop (PLUS_EXPR
,
4983 size_diffop (domain_max
, domain_min
),
4986 nbits
= tree_low_cst (bitlength
, 1);
4988 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4989 are "complicated" (more than one range), initialize (the
4990 constant parts) by copying from a constant. */
4991 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4992 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4994 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4995 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4996 char *bit_buffer
= alloca (nbits
);
4997 HOST_WIDE_INT word
= 0;
4998 unsigned int bit_pos
= 0;
4999 unsigned int ibit
= 0;
5000 unsigned int offset
= 0; /* In bytes from beginning of set. */
5002 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5005 if (bit_buffer
[ibit
])
5007 if (BYTES_BIG_ENDIAN
)
5008 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5010 word
|= 1 << bit_pos
;
5014 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5016 if (word
!= 0 || ! cleared
)
5018 rtx datum
= GEN_INT (word
);
5021 /* The assumption here is that it is safe to use
5022 XEXP if the set is multi-word, but not if
5023 it's single-word. */
5024 if (GET_CODE (target
) == MEM
)
5025 to_rtx
= adjust_address (target
, mode
, offset
);
5026 else if (offset
== 0)
5030 emit_move_insn (to_rtx
, datum
);
5037 offset
+= set_word_size
/ BITS_PER_UNIT
;
5042 /* Don't bother clearing storage if the set is all ones. */
5043 if (TREE_CHAIN (elt
) != NULL_TREE
5044 || (TREE_PURPOSE (elt
) == NULL_TREE
5046 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5047 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5048 || (tree_low_cst (TREE_VALUE (elt
), 0)
5049 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5050 != (HOST_WIDE_INT
) nbits
))))
5051 clear_storage (target
, expr_size (exp
));
5053 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5055 /* Start of range of element or NULL. */
5056 tree startbit
= TREE_PURPOSE (elt
);
5057 /* End of range of element, or element value. */
5058 tree endbit
= TREE_VALUE (elt
);
5059 HOST_WIDE_INT startb
, endb
;
5060 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5062 bitlength_rtx
= expand_expr (bitlength
,
5063 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5065 /* Handle non-range tuple element like [ expr ]. */
5066 if (startbit
== NULL_TREE
)
5068 startbit
= save_expr (endbit
);
5072 startbit
= convert (sizetype
, startbit
);
5073 endbit
= convert (sizetype
, endbit
);
5074 if (! integer_zerop (domain_min
))
5076 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5077 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5079 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5080 EXPAND_CONST_ADDRESS
);
5081 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5082 EXPAND_CONST_ADDRESS
);
5088 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
5089 (GET_MODE (target
), 0),
5092 emit_move_insn (targetx
, target
);
5095 else if (GET_CODE (target
) == MEM
)
5100 /* Optimization: If startbit and endbit are constants divisible
5101 by BITS_PER_UNIT, call memset instead. */
5102 if (TARGET_MEM_FUNCTIONS
5103 && TREE_CODE (startbit
) == INTEGER_CST
5104 && TREE_CODE (endbit
) == INTEGER_CST
5105 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5106 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5108 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5110 plus_constant (XEXP (targetx
, 0),
5111 startb
/ BITS_PER_UNIT
),
5113 constm1_rtx
, TYPE_MODE (integer_type_node
),
5114 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5115 TYPE_MODE (sizetype
));
5118 emit_library_call (setbits_libfunc
, LCT_NORMAL
,
5119 VOIDmode
, 4, XEXP (targetx
, 0),
5120 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5121 startbit_rtx
, TYPE_MODE (sizetype
),
5122 endbit_rtx
, TYPE_MODE (sizetype
));
5125 emit_move_insn (target
, targetx
);
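/* Illustrative example (added here; not part of the original source): given

       struct { unsigned a : 3, b : 5; } s;
       s.b = 7;

   expand_assignment reaches store_field, defined below, with (on a typical
   little-endian bit-field layout) BITSIZE == 5, BITPOS == 3 and
   MODE == VOIDmode, so the value is inserted with store_bit_field rather
   than through an ordinary memory reference.  */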
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
             enum machine_mode mode, tree exp, enum machine_mode value_mode,
             int unsignedp, tree type, int alias_set)
{
5156 HOST_WIDE_INT width_mask
= 0;
5158 if (TREE_CODE (exp
) == ERROR_MARK
)
5161 /* If we have nothing to store, do nothing unless the expression has
5164 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5165 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5166 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5168 /* If we are storing into an unaligned field of an aligned union that is
5169 in a register, we may have the mode of TARGET being an integer mode but
5170 MODE == BLKmode. In that case, get an aligned object whose size and
5171 alignment are the same as TARGET and store TARGET into it (we can avoid
5172 the store if the field being stored is the entire width of TARGET). Then
5173 call ourselves recursively to store the field into a BLKmode version of
5174 that object. Finally, load from the object into TARGET. This is not
5175 very efficient in general, but should only be slightly more expensive
5176 than the otherwise-required unaligned accesses. Perhaps this can be
5177 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5178 twice, once with emit_move_insn and once via store_field. */
5181 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5183 rtx object
= assign_temp (type
, 0, 1, 1);
5184 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5186 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5187 emit_move_insn (object
, target
);
5189 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5192 emit_move_insn (target
, object
);
5194 /* We want to return the BLKmode version of the data. */
5198 if (GET_CODE (target
) == CONCAT
)
5200 /* We're storing into a struct containing a single __complex. */
5204 return store_expr (exp
, target
, 0);
5207 /* If the structure is in a register or if the component
5208 is a bit field, we cannot use addressing to access it.
5209 Use bit-field techniques or SUBREG to store in it. */
5211 if (mode
== VOIDmode
5212 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5213 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5214 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5215 || GET_CODE (target
) == REG
5216 || GET_CODE (target
) == SUBREG
5217 /* If the field isn't aligned enough to store as an ordinary memref,
5218 store it as a bit field. */
5220 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5221 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5222 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5223 || (bitpos
% BITS_PER_UNIT
!= 0)))
5224 /* If the RHS and field are a constant size and the size of the
5225 RHS isn't the same size as the bitfield, we must use bitfield
5228 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5229 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5231 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5233 /* If BITSIZE is narrower than the size of the type of EXP
5234 we will be narrowing TEMP. Normally, what's wanted are the
5235 low-order bits. However, if EXP's type is a record and this is
5236 big-endian machine, we want the upper BITSIZE bits. */
5237 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5238 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5239 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5240 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5241 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5245 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5247 if (mode
!= VOIDmode
&& mode
!= BLKmode
5248 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5249 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5251 /* If the modes of TARGET and TEMP are both BLKmode, both
5252 must be in memory and BITPOS must be aligned on a byte
5253 boundary. If so, we simply do a block copy. */
5254 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5256 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5257 || bitpos
% BITS_PER_UNIT
!= 0)
5260 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5261 emit_block_move (target
, temp
,
5262 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5266 return value_mode
== VOIDmode
? const0_rtx
: target
;
5269 /* Store the value in the bitfield. */
5270 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5271 int_size_in_bytes (type
));
5273 if (value_mode
!= VOIDmode
)
5275 /* The caller wants an rtx for the value.
5276 If possible, avoid refetching from the bitfield itself. */
5278 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5281 enum machine_mode tmode
;
5283 tmode
= GET_MODE (temp
);
5284 if (tmode
== VOIDmode
)
5288 return expand_and (tmode
, temp
,
5289 gen_int_mode (width_mask
, tmode
),
5292 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5293 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5294 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5297 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5298 NULL_RTX
, value_mode
, VOIDmode
,
5299 int_size_in_bytes (type
));
5305 rtx addr
= XEXP (target
, 0);
5306 rtx to_rtx
= target
;
5308 /* If a value is wanted, it must be the lhs;
5309 so make the address stable for multiple use. */
5311 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5312 && ! CONSTANT_ADDRESS_P (addr
)
5313 /* A frame-pointer reference is already stable. */
5314 && ! (GET_CODE (addr
) == PLUS
5315 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5316 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5317 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5318 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5320 /* Now build a reference to just the desired component. */
5322 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5324 if (to_rtx
== target
)
5325 to_rtx
= copy_rtx (to_rtx
);
5327 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5328 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5329 set_mem_alias_set (to_rtx
, alias_set
);
5331 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
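/* Illustrative example (added here; not part of the original source): for a
   reference such as

       s.f.a[i]

   get_inner_reference, described below, returns the ultimate containing
   object S, folds the constant displacement of F and A into *PBITPOS, sets
   *PBITSIZE to the width of one element, and returns the variable part of
   the displacement -- I scaled by the element size -- as a tree in
   *POFFSET.  */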
5335 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5336 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5337 codes and find the ultimate containing object, which we return.
5339 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5340 bit position, and *PUNSIGNEDP to the signedness of the field.
5341 If the position of the field is variable, we store a tree
5342 giving the variable offset (in units) in *POFFSET.
5343 This offset is in addition to the bit position.
5344 If the position is not variable, we store 0 in *POFFSET.
5346 If any of the extraction expressions is volatile,
5347 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5349 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5350 is a mode that can be used to access the field. In that case, *PBITSIZE
5353 If the field describes a variable-sized object, *PMODE is set to
5354 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5355 this case, but the address of the object can be found. */
5358 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5359 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5360 enum machine_mode
*pmode
, int *punsignedp
,
5364 enum machine_mode mode
= VOIDmode
;
5365 tree offset
= size_zero_node
;
5366 tree bit_offset
= bitsize_zero_node
;
5367 tree placeholder_ptr
= 0;
5370 /* First get the mode, signedness, and size. We do this from just the
5371 outermost expression. */
5372 if (TREE_CODE (exp
) == COMPONENT_REF
)
5374 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5375 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5376 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5378 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5380 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5382 size_tree
= TREE_OPERAND (exp
, 1);
5383 *punsignedp
= TREE_UNSIGNED (exp
);
5387 mode
= TYPE_MODE (TREE_TYPE (exp
));
5388 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5390 if (mode
== BLKmode
)
5391 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5393 *pbitsize
= GET_MODE_BITSIZE (mode
);
5398 if (! host_integerp (size_tree
, 1))
5399 mode
= BLKmode
, *pbitsize
= -1;
5401 *pbitsize
= tree_low_cst (size_tree
, 1);
5404 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5405 and find the ultimate containing object. */
5408 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5409 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5410 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5412 tree field
= TREE_OPERAND (exp
, 1);
5413 tree this_offset
= DECL_FIELD_OFFSET (field
);
5415 /* If this field hasn't been filled in yet, don't go
5416 past it. This should only happen when folding expressions
5417 made during type construction. */
5418 if (this_offset
== 0)
5420 else if (CONTAINS_PLACEHOLDER_P (this_offset
))
5421 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5423 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5424 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5425 DECL_FIELD_BIT_OFFSET (field
));
5427 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5430 else if (TREE_CODE (exp
) == ARRAY_REF
5431 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5433 tree index
= TREE_OPERAND (exp
, 1);
5434 tree array
= TREE_OPERAND (exp
, 0);
5435 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5436 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5437 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5439 /* We assume all arrays have sizes that are a multiple of a byte.
5440 First subtract the lower bound, if any, in the type of the
5441 index, then convert to sizetype and multiply by the size of the
5443 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5444 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5447 /* If the index has a self-referential type, pass it to a
5448 WITH_RECORD_EXPR; if the component size is, pass our
5449 component to one. */
5450 if (CONTAINS_PLACEHOLDER_P (index
))
5451 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5452 if (CONTAINS_PLACEHOLDER_P (unit_size
))
5453 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5455 offset
= size_binop (PLUS_EXPR
, offset
,
5456 size_binop (MULT_EXPR
,
5457 convert (sizetype
, index
),
5461 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5463 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5465 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5466 We might have been called from tree optimization where we
5467 haven't set up an object yet. */
5476 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5477 conversions that don't change the mode, and all view conversions
5478 except those that need to "step up" the alignment. */
5479 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5480 && ! (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
5481 && ! ((TYPE_ALIGN (TREE_TYPE (exp
))
5482 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5484 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5485 < BIGGEST_ALIGNMENT
)
5486 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
5487 || TYPE_ALIGN_OK (TREE_TYPE
5488 (TREE_OPERAND (exp
, 0))))))
5489 && ! ((TREE_CODE (exp
) == NOP_EXPR
5490 || TREE_CODE (exp
) == CONVERT_EXPR
)
5491 && (TYPE_MODE (TREE_TYPE (exp
))
5492 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5495 /* If any reference in the chain is volatile, the effect is volatile. */
5496 if (TREE_THIS_VOLATILE (exp
))
5499 exp
= TREE_OPERAND (exp
, 0);
5502 /* If OFFSET is constant, see if we can return the whole thing as a
5503 constant bit position. Otherwise, split it up. */
5504 if (host_integerp (offset
, 0)
5505 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5507 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5508 && host_integerp (tem
, 0))
5509 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5511 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
5522 switch (TREE_CODE (t
))
5527 case ARRAY_RANGE_REF
:
5528 case NON_LVALUE_EXPR
:
5529 case VIEW_CONVERT_EXPR
:
5532 /* ??? Sure they are handled, but get_inner_reference may return
5533 a different PBITSIZE, depending upon whether the expression is
5534 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5537 return (TYPE_MODE (TREE_TYPE (t
))
5538 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
5556 /* Use subtarget as the target for operand 0 of a binary operation. */
5557 rtx subtarget
= get_subtarget (target
);
5558 enum rtx_code code
= GET_CODE (value
);
5560 /* Check for a PIC address load. */
5561 if ((code
== PLUS
|| code
== MINUS
)
5562 && XEXP (value
, 0) == pic_offset_table_rtx
5563 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5564 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5565 || GET_CODE (XEXP (value
, 1)) == CONST
))
5568 subtarget
= gen_reg_rtx (GET_MODE (value
));
5569 emit_move_insn (subtarget
, value
);
5573 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5576 target
= gen_reg_rtx (GET_MODE (value
));
5577 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5578 code
== ZERO_EXTEND
);
5582 if (GET_RTX_CLASS (code
) == '2' || GET_RTX_CLASS (code
) == 'c')
5584 op2
= XEXP (value
, 1);
5585 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5587 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5590 op2
= negate_rtx (GET_MODE (value
), op2
);
5593 /* Check for an addition with OP2 a constant integer and our first
5594 operand a PLUS of a virtual register and something else. In that
5595 case, we want to emit the sum of the virtual register and the
5596 constant first and then add the other value. This allows virtual
5597 register instantiation to simply modify the constant rather than
5598 creating another one around this addition. */
5599 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5600 && GET_CODE (XEXP (value
, 0)) == PLUS
5601 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5602 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5603 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5605 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5606 XEXP (XEXP (value
, 0), 0), op2
,
5607 subtarget
, 0, OPTAB_LIB_WIDEN
);
5608 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5609 force_operand (XEXP (XEXP (value
,
5611 target
, 0, OPTAB_LIB_WIDEN
);
5614 op1
= force_operand (XEXP (value
, 0), subtarget
);
5615 op2
= force_operand (op2
, NULL_RTX
);
5619 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5621 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5622 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5623 target
, 1, OPTAB_LIB_WIDEN
);
5625 return expand_divmod (0,
5626 FLOAT_MODE_P (GET_MODE (value
))
5627 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5628 GET_MODE (value
), op1
, op2
, target
, 0);
5631 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5635 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5639 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5643 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5644 target
, 0, OPTAB_LIB_WIDEN
);
5647 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5648 target
, 1, OPTAB_LIB_WIDEN
);
5651 if (GET_RTX_CLASS (code
) == '1')
5653 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5654 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5657 #ifdef INSN_SCHEDULING
5658 /* On machines that have insn scheduling, we want all memory reference to be
5659 explicit, so we need to deal with such paradoxical SUBREGs. */
5660 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5661 && (GET_MODE_SIZE (GET_MODE (value
))
5662 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5664 = simplify_gen_subreg (GET_MODE (value
),
5665 force_reg (GET_MODE (SUBREG_REG (value
)),
5666 force_operand (SUBREG_REG (value
),
5668 GET_MODE (SUBREG_REG (value
)),
5669 SUBREG_BYTE (value
));
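/* Illustrative example (added here; not part of the original source):
   safe_from_p, defined below, answers questions such as: when expanding

       x = y + z;

   may the sum be computed directly into X's rtx?  Yes, because the
   right-hand side never references X.  When X does appear in the source
   expression, or the expression contains a call that might clobber X's
   location, the routine conservatively returns 0 and the caller computes
   the value into a temporary first.  */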
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (rtx x, tree exp, int top_p)
{
5688 static tree save_expr_list
;
5691 /* If EXP has varying size, we MUST use a target since we currently
5692 have no way of allocating temporaries of variable size
5693 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5694 So we assume here that something at a higher level has prevented a
5695 clash. This is somewhat bogus, but the best we can do. Only
5696 do this when X is BLKmode and when we are at the top level. */
5697 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5698 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5699 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5700 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5701 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5703 && GET_MODE (x
) == BLKmode
)
5704 /* If X is in the outgoing argument area, it is always safe. */
5705 || (GET_CODE (x
) == MEM
5706 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5707 || (GET_CODE (XEXP (x
, 0)) == PLUS
5708 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5711 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5712 find the underlying pseudo. */
5713 if (GET_CODE (x
) == SUBREG
)
5716 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5720 /* A SAVE_EXPR might appear many times in the expression passed to the
5721 top-level safe_from_p call, and if it has a complex subexpression,
5722 examining it multiple times could result in a combinatorial explosion.
5723 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5724 with optimization took about 28 minutes to compile -- even though it was
5725 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5726 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5727 we have processed. Note that the only test of top_p was above. */
5736 rtn
= safe_from_p (x
, exp
, 0);
5738 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5739 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5744 /* Now look at our tree code and possibly recurse. */
5745 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5748 exp_rtl
= DECL_RTL_IF_SET (exp
);
5755 if (TREE_CODE (exp
) == TREE_LIST
)
5759 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5761 exp
= TREE_CHAIN (exp
);
5764 if (TREE_CODE (exp
) != TREE_LIST
)
5765 return safe_from_p (x
, exp
, 0);
5768 else if (TREE_CODE (exp
) == ERROR_MARK
)
5769 return 1; /* An already-visited SAVE_EXPR? */
5775 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
5780 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5784 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5785 the expression. If it is set, we conflict iff we are that rtx or
5786 both are in memory. Otherwise, we check all operands of the
5787 expression recursively. */
5789 switch (TREE_CODE (exp
))
5792 /* If the operand is static or we are static, we can't conflict.
5793 Likewise if we don't conflict with the operand at all. */
5794 if (staticp (TREE_OPERAND (exp
, 0))
5795 || TREE_STATIC (exp
)
5796 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5799 /* Otherwise, the only way this can conflict is if we are taking
5800 the address of a DECL a that address if part of X, which is
5802 exp
= TREE_OPERAND (exp
, 0);
5805 if (!DECL_RTL_SET_P (exp
)
5806 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5809 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5814 if (GET_CODE (x
) == MEM
5815 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5816 get_alias_set (exp
)))
5821 /* Assume that the call will clobber all hard registers and
5823 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5824 || GET_CODE (x
) == MEM
)
5829 /* If a sequence exists, we would have to scan every instruction
5830 in the sequence to see if it was safe. This is probably not
5832 if (RTL_EXPR_SEQUENCE (exp
))
5835 exp_rtl
= RTL_EXPR_RTL (exp
);
5838 case WITH_CLEANUP_EXPR
:
5839 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
5842 case CLEANUP_POINT_EXPR
:
5843 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5846 exp_rtl
= SAVE_EXPR_RTL (exp
);
5850 /* If we've already scanned this, don't do it again. Otherwise,
5851 show we've scanned it and record for clearing the flag if we're
5853 if (TREE_PRIVATE (exp
))
5856 TREE_PRIVATE (exp
) = 1;
5857 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5859 TREE_PRIVATE (exp
) = 0;
5863 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5867 /* The only operand we look at is operand 1. The rest aren't
5868 part of the expression. */
5869 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5875 /* If we have an rtx, we do not need to scan our operands. */
5879 nops
= first_rtl_op (TREE_CODE (exp
));
5880 for (i
= 0; i
< nops
; i
++)
5881 if (TREE_OPERAND (exp
, i
) != 0
5882 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5885 /* If this is a language-specific tree code, it may require
5886 special handling. */
5887 if ((unsigned int) TREE_CODE (exp
)
5888 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5889 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
5893 /* If we have an rtl, find any enclosed object. Then see if we conflict
5897 if (GET_CODE (exp_rtl
) == SUBREG
)
5899 exp_rtl
= SUBREG_REG (exp_rtl
);
5900 if (GET_CODE (exp_rtl
) == REG
5901 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5905 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5906 are memory and they conflict. */
5907 return ! (rtx_equal_p (x
, exp_rtl
)
5908 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5909 && true_dependence (exp_rtl
, VOIDmode
, x
,
5910 rtx_addr_varies_p
)));
5913 /* If we reach here, it is safe. */
5917 /* Subroutine of expand_expr: return rtx if EXP is a
5918 variable or parameter; else return 0. */
5924 switch (TREE_CODE (exp
))
5928 return DECL_RTL (exp
);
#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (tree exp)
{
  enum tree_code code;
  enum machine_mode mode;
5942 /* Strip any NOPs that don't change the mode. */
5944 code
= TREE_CODE (exp
);
5946 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5947 if (code
== NOP_EXPR
5948 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5951 /* First check the type of the overall operation. We need only look at
5952 unary, binary and relational operations. */
5953 if (TREE_CODE_CLASS (code
) == '1'
5954 || TREE_CODE_CLASS (code
) == '2'
5955 || TREE_CODE_CLASS (code
) == '<')
5957 mode
= TYPE_MODE (TREE_TYPE (exp
));
5958 if (GET_MODE_CLASS (mode
) == MODE_INT
5959 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5960 internal_error ("unsupported wide integer operation");
5963 /* Check operand of a unary op. */
5964 if (TREE_CODE_CLASS (code
) == '1')
5966 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5967 if (GET_MODE_CLASS (mode
) == MODE_INT
5968 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5969 internal_error ("unsupported wide integer operation");
5972 /* Check operands of a binary/comparison op. */
5973 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
5975 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5976 if (GET_MODE_CLASS (mode
) == MODE_INT
5977 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5978 internal_error ("unsupported wide integer operation");
5980 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
5981 if (GET_MODE_CLASS (mode
) == MODE_INT
5982 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5983 internal_error ("unsupported wide integer operation");
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
        return BIGGEST_ALIGNMENT;
      else
        {
          /* Note: tree_low_cst is intentionally not used here,
             we don't care about the upper bits.  */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;
          return c0 ? c0 : BIGGEST_ALIGNMENT;
        }
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
          && host_integerp (TREE_OPERAND (exp, 1), 1))
        {
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
          return MAX (1, c0 / c1);
        }
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR: case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
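/* For example, for an offset expression of the form (i * 12) + 8 the
   MULT_EXPR case yields 4 (the unknown factor I contributes 1, the
   constant 12 contributes 4) and the PLUS_EXPR case then returns
   MIN (4, 8) = 4, so a MEM indexed by that offset can be assumed to be
   4-byte aligned relative to its base.  */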
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (tree type, tree exp)
{
  unsigned HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

tree
find_placeholder (tree exp, tree *plist)
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
         see if any object has a type that is a pointer to the type we
         want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
           elt = ((TREE_CODE (elt) == COMPOUND_EXPR
                   || TREE_CODE (elt) == COND_EXPR)
                  ? TREE_OPERAND (elt, 1)
                  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                  ? TREE_OPERAND (elt, 0) : 0))
        if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
          {
            if (plist)
              *plist = placeholder_expr;
            return elt;
          }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
           elt
           = ((TREE_CODE (elt) == COMPOUND_EXPR
               || TREE_CODE (elt) == COND_EXPR)
              ? TREE_OPERAND (elt, 1)
              : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
              ? TREE_OPERAND (elt, 0) : 0))
        if (POINTER_TYPE_P (TREE_TYPE (elt))
            && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                == need_type))
          {
            if (plist)
              *plist = placeholder_expr;
            return build1 (INDIRECT_REF, need_type, elt);
          }
    }

  return 0;
}
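/* For instance, when expanding a size expression of an Ada-style record
   whose fields depend on the record object itself, the size tree contains
   a PLACEHOLDER_EXPR; a surrounding WITH_RECORD_EXPR pushes the actual
   object onto placeholder_list, and find_placeholder then locates either
   the object itself or, failing that, a pointer to it, which is
   dereferenced via the INDIRECT_REF built above.  */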
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
                 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
         temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
        exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
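/* As an illustration: in F () + (V = 1) with a front end that requires
   left-to-right evaluation (flag_evaluation_order), the first operand is
   wrapped in a SAVE_EXPR so that its result lives in its own temporary and
   cannot be clobbered while the second, side-effecting operand is
   expanded.  */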
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.  */
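/* For instance, with MODIFIER == EXPAND_SUM an address computation such as
   &a[i] may come back as something like
   (plus:SI (mult:SI (reg) (const_int 4)) (symbol_ref:SI "a"))
   rather than as a pseudo register holding the finished sum; the caller is
   then free to fold the whole expression into an addressing mode.  */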
rtx
expand_expr (tree exp, rtx target, enum machine_mode tmode,
             enum expand_modifier modifier)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
        return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                 || code == CONVERT_EXPR || code == REFERENCE_EXPR
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
          || code == INDIRECT_REF || code == BUFFER_REF)
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);
      else if (code == BIT_FIELD_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
    target = 0;
  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Labels in containing functions, or labels used from initializers,
           must be forced.  */
        if (modifier == EXPAND_INITIALIZER
            || (function != current_function_decl
                && function != inline_function_decl
                && function != 0))
          temp = force_label_rtx (exp);
        else
          temp = label_rtx (exp);

        temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
        {
          error ("%Jprior parameter's size depends on '%D'", exp, exp);
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't be used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          (*lang_hooks.mark_addressable) (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr
              = replace_equiv_address (addr,
                                       fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);

          temp = replace_equiv_address (DECL_RTL (exp), addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = replace_equiv_address (DECL_RTL (exp),
                                      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp,
                               (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
            abort ();

          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
          return temp;
        }

      return DECL_RTL (exp);
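      /* Note on the promoted-variable case above: on a target whose
         registers are naturally word sized, a QImode or HImode variable may
         be kept in a word-mode register; DECL_RTL then has the wider mode,
         and the SUBREG returned above is marked SUBREG_PROMOTED_VAR_P so
         later code knows the value has already been sign- or
         zero-extended.  */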
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
         which can result in (plus xx (const_int 0)), which can get
         simplified by validate_replace_rtx during virtual register
         instantiation, which can result in unrecognizable insns.
         Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
          && modifier != EXPAND_INITIALIZER)
        temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      return const_vector_from_tree (exp);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
        {
          enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
          rtx rtarg, itarg;

          rtarg = XEXP (original_target, 0);
          itarg = XEXP (original_target, 1);

          /* Move the real and imaginary parts separately.  */
          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

          if (op0 != rtarg)
            emit_move_insn (rtarg, op0);
          if (op1 != itarg)
            emit_move_insn (itarg, op1);

          return original_target;
        }

      /* ... fall through ...  */

    case STRING_CST:
      temp = output_constant_def (exp, 1);

      /* temp contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (temp, 0))
              || flag_force_addr))
        return replace_equiv_address (temp,
                                      copy_rtx (XEXP (temp, 0)));
      return temp;
    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        struct file_stack fs;

        fs.location = input_location;
        fs.next = expr_wfl_stack;
        input_filename = EXPR_WFL_FILENAME (exp);
        input_line = EXPR_WFL_LINENO (exp);
        expr_wfl_stack = &fs;
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_location);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp),
                                 (ignore ? const0_rtx : target),
                                 tmode, modifier);
        if (expr_wfl_stack != &fs)
          abort ();
        input_location = fs.location;
        expr_wfl_stack = fs.next;
        return to_return;
      }
6591 context
= decl_function_context (exp
);
6593 /* If this SAVE_EXPR was at global context, assume we are an
6594 initialization function and move it into our context. */
6596 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6598 /* We treat inline_function_decl as an alias for the current function
6599 because that is the inline function whose vars, types, etc.
6600 are being merged into the current function.
6601 See expand_inline_function. */
6602 if (context
== current_function_decl
|| context
== inline_function_decl
)
6605 /* If this is non-local, handle it. */
6608 /* The following call just exists to abort if the context is
6609 not of a containing function. */
6610 find_function_data (context
);
6612 temp
= SAVE_EXPR_RTL (exp
);
6613 if (temp
&& GET_CODE (temp
) == REG
)
6615 put_var_into_stack (exp
, /*rescan=*/true);
6616 temp
= SAVE_EXPR_RTL (exp
);
6618 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6621 replace_equiv_address (temp
,
6622 fix_lexical_addr (XEXP (temp
, 0), exp
));
6624 if (SAVE_EXPR_RTL (exp
) == 0)
6626 if (mode
== VOIDmode
)
6629 temp
= assign_temp (build_qualified_type (type
,
6631 | TYPE_QUAL_CONST
)),
6634 SAVE_EXPR_RTL (exp
) = temp
;
6635 if (!optimize
&& GET_CODE (temp
) == REG
)
6636 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6639 /* If the mode of TEMP does not match that of the expression, it
6640 must be a promoted value. We pass store_expr a SUBREG of the
6641 wanted mode but mark it so that we know that it was already
6644 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6646 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6647 promote_mode (type
, mode
, &unsignedp
, 0);
6648 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6649 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6652 if (temp
== const0_rtx
)
6653 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6655 store_expr (TREE_OPERAND (exp
, 0), temp
,
6656 modifier
== EXPAND_STACK_PARM
? 2 : 0);
6658 TREE_USED (exp
) = 1;
6661 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6662 must be a promoted value. We return a SUBREG of the wanted mode,
6663 but mark it so that we know that it was already extended. */
6665 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6666 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6668 /* Compute the signedness and make the proper SUBREG. */
6669 promote_mode (type
, mode
, &unsignedp
, 0);
6670 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6671 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6672 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6676 return SAVE_EXPR_RTL (exp
);
6681 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6682 TREE_OPERAND (exp
, 0)
6683 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
6687 case PLACEHOLDER_EXPR
:
6689 tree old_list
= placeholder_list
;
6690 tree placeholder_expr
= 0;
6692 exp
= find_placeholder (exp
, &placeholder_expr
);
6696 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6697 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
6698 placeholder_list
= old_list
;
6702 case WITH_RECORD_EXPR
:
6703 /* Put the object on the placeholder list, expand our first operand,
6704 and pop the list. */
6705 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6707 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
6709 placeholder_list
= TREE_CHAIN (placeholder_list
);
6713 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6714 expand_goto (TREE_OPERAND (exp
, 0));
6716 expand_computed_goto (TREE_OPERAND (exp
, 0));
6720 expand_exit_loop_if_false (NULL
,
6721 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6724 case LABELED_BLOCK_EXPR
:
6725 if (LABELED_BLOCK_BODY (exp
))
6726 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6727 /* Should perhaps use expand_label, but this is simpler and safer. */
6728 do_pending_stack_adjust ();
6729 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6732 case EXIT_BLOCK_EXPR
:
6733 if (EXIT_BLOCK_RETURN (exp
))
6734 sorry ("returned value in block_exit_expr");
6735 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6740 expand_start_loop (1);
6741 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
6749 tree vars
= TREE_OPERAND (exp
, 0);
6751 /* Need to open a binding contour here because
6752 if there are any cleanups they must be contained here. */
6753 expand_start_bindings (2);
6755 /* Mark the corresponding BLOCK for output in its proper place. */
6756 if (TREE_OPERAND (exp
, 2) != 0
6757 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6758 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
6760 /* If VARS have not yet been expanded, expand them now. */
6763 if (!DECL_RTL_SET_P (vars
))
6765 expand_decl_init (vars
);
6766 vars
= TREE_CHAIN (vars
);
6769 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
6771 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6777 if (RTL_EXPR_SEQUENCE (exp
))
6779 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6781 emit_insn (RTL_EXPR_SEQUENCE (exp
));
6782 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6784 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6785 free_temps_for_rtl_expr (exp
);
6786 return RTL_EXPR_RTL (exp
);
6789 /* If we don't need the result, just ensure we evaluate any
6795 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6796 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6801 /* All elts simple constants => refer to a constant in memory. But
6802 if this is a non-BLKmode mode, let it store a field at a time
6803 since that should make a CONST_INT or CONST_DOUBLE when we
6804 fold. Likewise, if we have a target we can use, it is best to
6805 store directly into the target unless the type is large enough
6806 that memcpy will be used. If we are making an initializer and
6807 all operands are constant, put it in memory as well.
6809 FIXME: Avoid trying to fill vector constructors piece-meal.
6810 Output them with output_constant_def below unless we're sure
6811 they're zeros. This should go away when vector initializers
6812 are treated like VECTOR_CST instead of arrays.
6814 else if ((TREE_STATIC (exp
)
6815 && ((mode
== BLKmode
6816 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6817 || TREE_ADDRESSABLE (exp
)
6818 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6819 && (! MOVE_BY_PIECES_P
6820 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6822 && ((TREE_CODE (type
) == VECTOR_TYPE
6823 && !is_zeros_p (exp
))
6824 || ! mostly_zeros_p (exp
)))))
6825 || ((modifier
== EXPAND_INITIALIZER
6826 || modifier
== EXPAND_CONST_ADDRESS
)
6827 && TREE_CONSTANT (exp
)))
6829 rtx constructor
= output_constant_def (exp
, 1);
6831 if (modifier
!= EXPAND_CONST_ADDRESS
6832 && modifier
!= EXPAND_INITIALIZER
6833 && modifier
!= EXPAND_SUM
)
6834 constructor
= validize_mem (constructor
);
6840 /* Handle calls that pass values in multiple non-contiguous
6841 locations. The Irix 6 ABI has examples of this. */
6842 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6843 || GET_CODE (target
) == PARALLEL
6844 || modifier
== EXPAND_STACK_PARM
)
6846 = assign_temp (build_qualified_type (type
,
6848 | (TREE_READONLY (exp
)
6849 * TYPE_QUAL_CONST
))),
6850 0, TREE_ADDRESSABLE (exp
), 1);
6852 store_constructor (exp
, target
, 0, int_expr_size (exp
));
6858 tree exp1
= TREE_OPERAND (exp
, 0);
6860 tree string
= string_constant (exp1
, &index
);
6862 /* Try to optimize reads from const strings. */
6864 && TREE_CODE (string
) == STRING_CST
6865 && TREE_CODE (index
) == INTEGER_CST
6866 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6867 && GET_MODE_CLASS (mode
) == MODE_INT
6868 && GET_MODE_SIZE (mode
) == 1
6869 && modifier
!= EXPAND_WRITE
)
6870 return gen_int_mode (TREE_STRING_POINTER (string
)
6871 [TREE_INT_CST_LOW (index
)], mode
);
6873 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6874 op0
= memory_address (mode
, op0
);
6875 temp
= gen_rtx_MEM (mode
, op0
);
6876 set_mem_attributes (temp
, exp
, 0);
6878 /* If we are writing to this object and its type is a record with
6879 readonly fields, we must mark it as readonly so it will
6880 conflict with readonly references to those fields. */
6881 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
6882 RTX_UNCHANGING_P (temp
) = 1;
6888 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6892 tree array
= TREE_OPERAND (exp
, 0);
6893 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6894 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6895 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6898 /* Optimize the special-case of a zero lower bound.
6900 We convert the low_bound to sizetype to avoid some problems
6901 with constant folding. (E.g. suppose the lower bound is 1,
6902 and its mode is QI. Without the conversion, (ARRAY
6903 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6904 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6906 if (! integer_zerop (low_bound
))
6907 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6909 /* Fold an expression like: "foo"[2].
6910 This is not done in fold so it won't happen inside &.
6911 Don't fold if this is for wide characters since it's too
6912 difficult to do correctly and this is a very rare case. */
6914 if (modifier
!= EXPAND_CONST_ADDRESS
6915 && modifier
!= EXPAND_INITIALIZER
6916 && modifier
!= EXPAND_MEMORY
6917 && TREE_CODE (array
) == STRING_CST
6918 && TREE_CODE (index
) == INTEGER_CST
6919 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6920 && GET_MODE_CLASS (mode
) == MODE_INT
6921 && GET_MODE_SIZE (mode
) == 1)
6922 return gen_int_mode (TREE_STRING_POINTER (array
)
6923 [TREE_INT_CST_LOW (index
)], mode
);
6925 /* If this is a constant index into a constant array,
6926 just get the value from the array. Handle both the cases when
6927 we have an explicit constructor and when our operand is a variable
6928 that was declared const. */
6930 if (modifier
!= EXPAND_CONST_ADDRESS
6931 && modifier
!= EXPAND_INITIALIZER
6932 && modifier
!= EXPAND_MEMORY
6933 && TREE_CODE (array
) == CONSTRUCTOR
6934 && ! TREE_SIDE_EFFECTS (array
)
6935 && TREE_CODE (index
) == INTEGER_CST
6936 && 0 > compare_tree_int (index
,
6937 list_length (CONSTRUCTOR_ELTS
6938 (TREE_OPERAND (exp
, 0)))))
6942 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6943 i
= TREE_INT_CST_LOW (index
);
6944 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6948 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
6952 else if (optimize
>= 1
6953 && modifier
!= EXPAND_CONST_ADDRESS
6954 && modifier
!= EXPAND_INITIALIZER
6955 && modifier
!= EXPAND_MEMORY
6956 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6957 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6958 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
6960 if (TREE_CODE (index
) == INTEGER_CST
)
6962 tree init
= DECL_INITIAL (array
);
6964 if (TREE_CODE (init
) == CONSTRUCTOR
)
6968 for (elem
= CONSTRUCTOR_ELTS (init
);
6970 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6971 elem
= TREE_CHAIN (elem
))
6974 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6975 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6978 else if (TREE_CODE (init
) == STRING_CST
6979 && 0 > compare_tree_int (index
,
6980 TREE_STRING_LENGTH (init
)))
6982 tree type
= TREE_TYPE (TREE_TYPE (init
));
6983 enum machine_mode mode
= TYPE_MODE (type
);
6985 if (GET_MODE_CLASS (mode
) == MODE_INT
6986 && GET_MODE_SIZE (mode
) == 1)
6987 return gen_int_mode (TREE_STRING_POINTER (init
)
6988 [TREE_INT_CST_LOW (index
)], mode
);
6993 goto normal_inner_ref
;
6996 /* If the operand is a CONSTRUCTOR, we can just extract the
6997 appropriate field if it is present. */
6998 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
7002 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
7003 elt
= TREE_CHAIN (elt
))
7004 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
7005 /* We can normally use the value of the field in the
7006 CONSTRUCTOR. However, if this is a bitfield in
7007 an integral mode that we can fit in a HOST_WIDE_INT,
7008 we must mask only the number of bits in the bitfield,
7009 since this is done implicitly by the constructor. If
7010 the bitfield does not meet either of those conditions,
7011 we can't do this optimization. */
7012 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7013 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
7015 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
7016 <= HOST_BITS_PER_WIDE_INT
))))
7018 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7019 && modifier
== EXPAND_STACK_PARM
)
7021 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
7022 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
7024 HOST_WIDE_INT bitsize
7025 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
7026 enum machine_mode imode
7027 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
7029 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
7031 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7032 op0
= expand_and (imode
, op0
, op1
, target
);
7037 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
7040 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7042 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7050 goto normal_inner_ref
;
7053 case ARRAY_RANGE_REF
:
7056 enum machine_mode mode1
;
7057 HOST_WIDE_INT bitsize
, bitpos
;
7060 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7061 &mode1
, &unsignedp
, &volatilep
);
7064 /* If we got back the original object, something is wrong. Perhaps
7065 we are evaluating an expression too early. In any event, don't
7066 infinitely recurse. */
7070 /* If TEM's type is a union of variable size, pass TARGET to the inner
7071 computation, since it will need a temporary and TARGET is known
7072 to have to do. This occurs in unchecked conversion in Ada. */
7076 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7077 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7079 && modifier
!= EXPAND_STACK_PARM
7080 ? target
: NULL_RTX
),
7082 (modifier
== EXPAND_INITIALIZER
7083 || modifier
== EXPAND_CONST_ADDRESS
7084 || modifier
== EXPAND_STACK_PARM
)
7085 ? modifier
: EXPAND_NORMAL
);
7087 /* If this is a constant, put it into a register if it is a
7088 legitimate constant and OFFSET is 0 and memory if it isn't. */
7089 if (CONSTANT_P (op0
))
7091 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7092 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7094 op0
= force_reg (mode
, op0
);
7096 op0
= validize_mem (force_const_mem (mode
, op0
));
7099 /* Otherwise, if this object not in memory and we either have an
7100 offset or a BLKmode result, put it there. This case can't occur in
7101 C, but can in Ada if we have unchecked conversion of an expression
7102 from a scalar type to an array or record type or for an
7103 ARRAY_RANGE_REF whose type is BLKmode. */
7104 else if (GET_CODE (op0
) != MEM
7106 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
7108 /* If the operand is a SAVE_EXPR, we can deal with this by
7109 forcing the SAVE_EXPR into memory. */
7110 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7112 put_var_into_stack (TREE_OPERAND (exp
, 0),
7114 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7119 = build_qualified_type (TREE_TYPE (tem
),
7120 (TYPE_QUALS (TREE_TYPE (tem
))
7121 | TYPE_QUAL_CONST
));
7122 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7124 emit_move_insn (memloc
, op0
);
7131 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7134 if (GET_CODE (op0
) != MEM
)
7137 #ifdef POINTERS_EXTEND_UNSIGNED
7138 if (GET_MODE (offset_rtx
) != Pmode
)
7139 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7141 if (GET_MODE (offset_rtx
) != ptr_mode
)
7142 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7145 /* A constant address in OP0 can have VOIDmode, we must not try
7146 to call force_reg for that case. Avoid that case. */
7147 if (GET_CODE (op0
) == MEM
7148 && GET_MODE (op0
) == BLKmode
7149 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7151 && (bitpos
% bitsize
) == 0
7152 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7153 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7155 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7159 op0
= offset_address (op0
, offset_rtx
,
7160 highest_pow2_factor (offset
));
7163 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7164 record its alignment as BIGGEST_ALIGNMENT. */
7165 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7166 && is_aligning_offset (offset
, tem
))
7167 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7169 /* Don't forget about volatility even if this is a bitfield. */
7170 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7172 if (op0
== orig_op0
)
7173 op0
= copy_rtx (op0
);
7175 MEM_VOLATILE_P (op0
) = 1;
7178 /* The following code doesn't handle CONCAT.
7179 Assume only bitpos == 0 can be used for CONCAT, due to
7180 one element arrays having the same mode as its element. */
7181 if (GET_CODE (op0
) == CONCAT
)
7183 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
7188 /* In cases where an aligned union has an unaligned object
7189 as a field, we might be extracting a BLKmode value from
7190 an integer-mode (e.g., SImode) object. Handle this case
7191 by doing the extract into an object as wide as the field
7192 (which we know to be the width of a basic mode), then
7193 storing into memory, and changing the mode to BLKmode. */
7194 if (mode1
== VOIDmode
7195 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7196 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7197 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7198 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7199 && modifier
!= EXPAND_CONST_ADDRESS
7200 && modifier
!= EXPAND_INITIALIZER
)
7201 /* If the field isn't aligned enough to fetch as a memref,
7202 fetch it as a bit field. */
7203 || (mode1
!= BLKmode
7204 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7205 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
7206 && ((modifier
== EXPAND_CONST_ADDRESS
7207 || modifier
== EXPAND_INITIALIZER
)
7209 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7210 || (bitpos
% BITS_PER_UNIT
!= 0)))
7211 /* If the type and the field are a constant size and the
7212 size of the type isn't the same size as the bitfield,
7213 we must use bitfield operations. */
7215 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7217 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7220 enum machine_mode ext_mode
= mode
;
7222 if (ext_mode
== BLKmode
7223 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7224 && GET_CODE (target
) == MEM
7225 && bitpos
% BITS_PER_UNIT
== 0))
7226 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7228 if (ext_mode
== BLKmode
)
7231 target
= assign_temp (type
, 0, 1, 1);
7236 /* In this case, BITPOS must start at a byte boundary and
7237 TARGET, if specified, must be a MEM. */
7238 if (GET_CODE (op0
) != MEM
7239 || (target
!= 0 && GET_CODE (target
) != MEM
)
7240 || bitpos
% BITS_PER_UNIT
!= 0)
7243 emit_block_move (target
,
7244 adjust_address (op0
, VOIDmode
,
7245 bitpos
/ BITS_PER_UNIT
),
7246 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7248 (modifier
== EXPAND_STACK_PARM
7249 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7254 op0
= validize_mem (op0
);
7256 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7257 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7259 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7260 (modifier
== EXPAND_STACK_PARM
7261 ? NULL_RTX
: target
),
7263 int_size_in_bytes (TREE_TYPE (tem
)));
7265 /* If the result is a record type and BITSIZE is narrower than
7266 the mode of OP0, an integral mode, and this is a big endian
7267 machine, we must put the field into the high-order bits. */
7268 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7269 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7270 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7271 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7272 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7276 if (mode
== BLKmode
)
7278 rtx
new = assign_temp (build_qualified_type
7279 ((*lang_hooks
.types
.type_for_mode
)
7281 TYPE_QUAL_CONST
), 0, 1, 1);
7283 emit_move_insn (new, op0
);
7284 op0
= copy_rtx (new);
7285 PUT_MODE (op0
, BLKmode
);
7286 set_mem_attributes (op0
, exp
, 1);
7292 /* If the result is BLKmode, use that to access the object
7294 if (mode
== BLKmode
)
7297 /* Get a reference to just this component. */
7298 if (modifier
== EXPAND_CONST_ADDRESS
7299 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7300 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7302 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7304 if (op0
== orig_op0
)
7305 op0
= copy_rtx (op0
);
7307 set_mem_attributes (op0
, exp
, 0);
7308 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7309 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7311 MEM_VOLATILE_P (op0
) |= volatilep
;
7312 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7313 || modifier
== EXPAND_CONST_ADDRESS
7314 || modifier
== EXPAND_INITIALIZER
)
7316 else if (target
== 0)
7317 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7319 convert_move (target
, op0
, unsignedp
);
7325 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7327 /* Evaluate the interior expression. */
7328 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7331 /* Get or create an instruction off which to hang a note. */
7332 if (REG_P (subtarget
))
7335 insn
= get_last_insn ();
7338 if (! INSN_P (insn
))
7339 insn
= prev_nonnote_insn (insn
);
7343 target
= gen_reg_rtx (GET_MODE (subtarget
));
7344 insn
= emit_move_insn (target
, subtarget
);
7347 /* Collect the data for the note. */
7348 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7349 vtbl_ref
= plus_constant (vtbl_ref
,
7350 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7351 /* Discard the initial CONST that was added. */
7352 vtbl_ref
= XEXP (vtbl_ref
, 0);
7355 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7360 /* Intended for a reference to a buffer of a file-object in Pascal.
7361 But it's not certain that a special tree code will really be
7362 necessary for these. INDIRECT_REF might work for them. */
7368 /* Pascal set IN expression.
7371 rlo = set_low - (set_low%bits_per_word);
7372 the_word = set [ (index - rlo)/bits_per_word ];
7373 bit_index = index % bits_per_word;
7374 bitmask = 1 << bit_index;
7375 return !!(the_word & bitmask); */
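      /* Worked example (illustrative, taking bits_per_word as 8): with
         set_low = 3 and index = 21, rlo = 3 - (3 % 8) = 0, the_word is
         set[(21 - 0) / 8] = set[2], bit_index = 21 % 8 = 5 and
         bitmask = 1 << 5 = 0x20, so the test reads bit 5 of byte 2 of
         the set.  */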
7377 tree set
= TREE_OPERAND (exp
, 0);
7378 tree index
= TREE_OPERAND (exp
, 1);
7379 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7380 tree set_type
= TREE_TYPE (set
);
7381 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7382 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7383 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7384 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7385 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7386 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7387 rtx setaddr
= XEXP (setval
, 0);
7388 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7390 rtx diff
, quo
, rem
, addr
, bit
, result
;
7392 /* If domain is empty, answer is no. Likewise if index is constant
7393 and out of bounds. */
7394 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7395 && TREE_CODE (set_low_bound
) == INTEGER_CST
7396 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7397 || (TREE_CODE (index
) == INTEGER_CST
7398 && TREE_CODE (set_low_bound
) == INTEGER_CST
7399 && tree_int_cst_lt (index
, set_low_bound
))
7400 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7401 && TREE_CODE (index
) == INTEGER_CST
7402 && tree_int_cst_lt (set_high_bound
, index
))))
7406 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7408 /* If we get here, we have to generate the code for both cases
7409 (in range and out of range). */
7411 op0
= gen_label_rtx ();
7412 op1
= gen_label_rtx ();
7414 if (! (GET_CODE (index_val
) == CONST_INT
7415 && GET_CODE (lo_r
) == CONST_INT
))
7416 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7417 GET_MODE (index_val
), iunsignedp
, op1
);
7419 if (! (GET_CODE (index_val
) == CONST_INT
7420 && GET_CODE (hi_r
) == CONST_INT
))
7421 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7422 GET_MODE (index_val
), iunsignedp
, op1
);
7424 /* Calculate the element number of bit zero in the first word
7426 if (GET_CODE (lo_r
) == CONST_INT
)
7427 rlow
= GEN_INT (INTVAL (lo_r
)
7428 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7430 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7431 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7432 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7434 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7435 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7437 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7438 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7439 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7440 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7442 addr
= memory_address (byte_mode
,
7443 expand_binop (index_mode
, add_optab
, diff
,
7444 setaddr
, NULL_RTX
, iunsignedp
,
7447 /* Extract the bit we want to examine. */
7448 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7449 gen_rtx_MEM (byte_mode
, addr
),
7450 make_tree (TREE_TYPE (index
), rem
),
7452 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7453 GET_MODE (target
) == byte_mode
? target
: 0,
7454 1, OPTAB_LIB_WIDEN
);
7456 if (result
!= target
)
7457 convert_move (target
, result
, 1);
7459 /* Output the code to handle the out-of-range case. */
7462 emit_move_insn (target
, const0_rtx
);
7467 case WITH_CLEANUP_EXPR
:
7468 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7470 WITH_CLEANUP_EXPR_RTL (exp
)
7471 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7472 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7473 CLEANUP_EH_ONLY (exp
));
7475 /* That's it for this cleanup. */
7476 TREE_OPERAND (exp
, 1) = 0;
7478 return WITH_CLEANUP_EXPR_RTL (exp
);
7480 case CLEANUP_POINT_EXPR
:
7482 /* Start a new binding layer that will keep track of all cleanup
7483 actions to be performed. */
7484 expand_start_bindings (2);
7486 target_temp_slot_level
= temp_slot_level
;
7488 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7489 /* If we're going to use this value, load it up now. */
7491 op0
= force_not_mem (op0
);
7492 preserve_temp_slots (op0
);
7493 expand_end_bindings (NULL_TREE
, 0, 0);
7498 /* Check for a built-in function. */
7499 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7500 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7502 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7504 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7505 == BUILT_IN_FRONTEND
)
7506 return (*lang_hooks
.expand_expr
) (exp
, original_target
,
7509 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7512 return expand_call (exp
, target
, ignore
);
7514 case NON_LVALUE_EXPR
:
7517 case REFERENCE_EXPR
:
7518 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7521 if (TREE_CODE (type
) == UNION_TYPE
)
7523 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7525 /* If both input and output are BLKmode, this conversion isn't doing
7526 anything except possibly changing memory attribute. */
7527 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7529 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7532 result
= copy_rtx (result
);
7533 set_mem_attributes (result
, exp
, 0);
7538 target
= assign_temp (type
, 0, 1, 1);
7540 if (GET_CODE (target
) == MEM
)
7541 /* Store data into beginning of memory target. */
7542 store_expr (TREE_OPERAND (exp
, 0),
7543 adjust_address (target
, TYPE_MODE (valtype
), 0),
7544 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7546 else if (GET_CODE (target
) == REG
)
7547 /* Store this field into a union of the proper type. */
7548 store_field (target
,
7549 MIN ((int_size_in_bytes (TREE_TYPE
7550 (TREE_OPERAND (exp
, 0)))
7552 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7553 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7554 VOIDmode
, 0, type
, 0);
7558 /* Return the entire union. */
7562 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7564 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7567 /* If the signedness of the conversion differs and OP0 is
7568 a promoted SUBREG, clear that indication since we now
7569 have to do the proper extension. */
7570 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7571 && GET_CODE (op0
) == SUBREG
)
7572 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7577 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7578 if (GET_MODE (op0
) == mode
)
7581 /* If OP0 is a constant, just convert it into the proper mode. */
7582 if (CONSTANT_P (op0
))
7584 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7585 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7587 if (modifier
== EXPAND_INITIALIZER
)
7588 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7589 subreg_lowpart_offset (mode
,
7592 return convert_modes (mode
, inner_mode
, op0
,
7593 TREE_UNSIGNED (inner_type
));
7596 if (modifier
== EXPAND_INITIALIZER
)
7597 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7601 convert_to_mode (mode
, op0
,
7602 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7604 convert_move (target
, op0
,
7605 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7608 case VIEW_CONVERT_EXPR
:
7609 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7611 /* If the input and output modes are both the same, we are done.
7612 Otherwise, if neither mode is BLKmode and both are integral and within
7613 a word, we can use gen_lowpart. If neither is true, make sure the
7614 operand is in memory and convert the MEM to the new mode. */
7615 if (TYPE_MODE (type
) == GET_MODE (op0
))
7617 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7618 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7619 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7620 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7621 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7622 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7623 else if (GET_CODE (op0
) != MEM
)
7625 /* If the operand is not a MEM, force it into memory. Since we
7626 are going to be be changing the mode of the MEM, don't call
7627 force_const_mem for constants because we don't allow pool
7628 constants to change mode. */
7629 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7631 if (TREE_ADDRESSABLE (exp
))
7634 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7636 = assign_stack_temp_for_type
7637 (TYPE_MODE (inner_type
),
7638 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7640 emit_move_insn (target
, op0
);
7644 /* At this point, OP0 is in the correct mode. If the output type is such
7645 that the operand is known to be aligned, indicate that it is.
7646 Otherwise, we need only be concerned about alignment for non-BLKmode
7648 if (GET_CODE (op0
) == MEM
)
7650 op0
= copy_rtx (op0
);
7652 if (TYPE_ALIGN_OK (type
))
7653 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7654 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7655 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7657 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7658 HOST_WIDE_INT temp_size
7659 = MAX (int_size_in_bytes (inner_type
),
7660 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7661 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7662 temp_size
, 0, type
);
7663 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7665 if (TREE_ADDRESSABLE (exp
))
7668 if (GET_MODE (op0
) == BLKmode
)
7669 emit_block_move (new_with_op0_mode
, op0
,
7670 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7671 (modifier
== EXPAND_STACK_PARM
7672 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7674 emit_move_insn (new_with_op0_mode
, op0
);
7679 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7685 this_optab
= ! unsignedp
&& flag_trapv
7686 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7687 ? addv_optab
: add_optab
;
7689 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7690 something else, make sure we add the register to the constant and
7691 then to the other thing. This case can occur during strength
7692 reduction and doing it this way will produce better code if the
7693 frame pointer or argument pointer is eliminated.
7695 fold-const.c will ensure that the constant is always in the inner
7696 PLUS_EXPR, so the only case we need to do anything about is if
7697 sp, ap, or fp is our second argument, in which case we must swap
7698 the innermost first argument and our second argument. */
7700 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7701 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7702 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7703 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7704 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7705 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7707 tree t
= TREE_OPERAND (exp
, 1);
7709 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7710 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7713 /* If the result is to be ptr_mode and we are adding an integer to
7714 something, we might be forming a constant. So try to use
7715 plus_constant. If it produces a sum and we can't accept it,
7716 use force_operand. This allows P = &ARR[const] to generate
7717 efficient code on machines where a SYMBOL_REF is not a valid
7720 If this is an EXPAND_SUM call, always return the sum. */
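      /* For example, for P = &ARR[2] where ARR is a global array of 4-byte
         ints, the constant offset 8 can be folded into the address by
         plus_constant, giving roughly
         (const (plus (symbol_ref "ARR") (const_int 8))); if that is not a
         legitimate address on the target, force_operand is used to emit
         the addition explicitly.  */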
7721 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7722 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7724 if (modifier
== EXPAND_STACK_PARM
)
7726 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7727 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7728 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7732 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7734 /* Use immed_double_const to ensure that the constant is
7735 truncated according to the mode of OP1, then sign extended
7736 to a HOST_WIDE_INT. Using the constant directly can result
7737 in non-canonical RTL in a 64x32 cross compile. */
7739 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7741 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7742 op1
= plus_constant (op1
, INTVAL (constant_part
));
7743 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7744 op1
= force_operand (op1
, target
);
	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);
      return simplify_gen_binary (PLUS, mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
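      /* Only a static initializer can keep a symbolic MINUS like the one
	 above: the assembler computes the difference of the two addresses
	 itself.  Everything else falls through to a real subtraction.  */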
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op1);
	}

      goto binop2;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */
      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1))));
	}
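      /* The forcing above is done in two steps because force_operand need
	 not return a REG; when it does not, the value is copied into a
	 fresh pseudo before being used as the index of the MULT.  */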
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
7886 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7887 && TREE_CODE (type
) == INTEGER_TYPE
7888 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7889 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7890 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7891 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7892 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7893 /* Don't use a widening multiply if a shift will do. */
7894 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7895 > HOST_BITS_PER_WIDE_INT
)
7896 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7898 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7899 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7901 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7902 /* If both operands are extended, they must either both
7903 be zero-extended or both be sign-extended. */
7904 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7906 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7908 enum machine_mode innermode
7909 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7910 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7911 ? smul_widen_optab
: umul_widen_optab
);
7912 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7913 ? umul_widen_optab
: smul_widen_optab
);
7914 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7916 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7918 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7919 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7920 TREE_OPERAND (exp
, 1),
7921 NULL_RTX
, &op0
, &op1
, 0);
7923 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7924 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7925 NULL_RTX
, &op0
, &op1
, 0);
7928 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7929 && innermode
== word_mode
)
7932 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7933 NULL_RTX
, VOIDmode
, 0);
7934 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7935 op1
= convert_modes (innermode
, mode
,
7936 expand_expr (TREE_OPERAND (exp
, 1),
7937 NULL_RTX
, VOIDmode
, 0),
7940 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7941 NULL_RTX
, VOIDmode
, 0);
7942 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7943 unsignedp
, OPTAB_LIB_WIDEN
);
7944 htem
= expand_mult_highpart_adjust (innermode
,
7945 gen_highpart (innermode
, temp
),
7947 gen_highpart (innermode
, temp
),
7949 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
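      /* In the general case let expand_mult do the work; for constant
	 multipliers it will substitute a cheaper shift/add sequence when
	 one exists.  */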
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
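      /* The first argument of expand_divmod is the rem_flag: 0 asks for
	 the quotient (the division cases above), 1 asks for the remainder
	 (the modulus cases below).  */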
    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
	 expensive divide.  If not, combine will rebuild the original
	 computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;
    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
8023 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8024 if (modifier
== EXPAND_STACK_PARM
)
8026 temp
= expand_unop (mode
,
8027 ! unsignedp
&& flag_trapv
8028 && (GET_MODE_CLASS(mode
) == MODE_INT
)
8029 ? negv_optab
: neg_optab
, op0
, target
, 0);
8035 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8036 if (modifier
== EXPAND_STACK_PARM
)
8039 /* ABS_EXPR is not valid for complex arguments. */
8040 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
8041 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
8044 /* Unsigned abs is simply the operand. Testing here means we don't
8045 risk generating incorrect code below. */
8046 if (TREE_UNSIGNED (type
))
8049 return expand_abs (mode
, op0
, target
, unsignedp
,
8050 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
8054 target
= original_target
;
8056 || modifier
== EXPAND_STACK_PARM
8057 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8058 || GET_MODE (target
) != mode
8059 || (GET_CODE (target
) == REG
8060 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8061 target
= gen_reg_rtx (mode
);
8062 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8063 target
, &op0
, &op1
, 0);
8065 /* First try to do it with a special MIN or MAX instruction.
8066 If that does not win, use a conditional jump to select the proper
8068 this_optab
= (TREE_UNSIGNED (type
)
8069 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8070 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8072 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8077 /* At this point, a MEM target is no longer useful; we will get better
8080 if (GET_CODE (target
) == MEM
)
8081 target
= gen_reg_rtx (mode
);
8083 /* If op1 was placed in target, swap op0 and op1. */
8084 if (target
!= op0
&& target
== op1
)
8092 emit_move_insn (target
, op0
);
8094 op0
= gen_label_rtx ();
8096 /* If this mode is an integer too wide to compare properly,
8097 compare word by word. Rely on cse to optimize constant cases. */
8098 if (GET_MODE_CLASS (mode
) == MODE_INT
8099 && ! can_compare_p (GE
, mode
, ccp_jump
))
8101 if (code
== MAX_EXPR
)
8102 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8103 target
, op1
, NULL_RTX
, op0
);
8105 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8106 op1
, target
, NULL_RTX
, op0
);
8110 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
8111 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8112 unsignedp
, mode
, NULL_RTX
, NULL_RTX
,
8115 emit_move_insn (target
, op1
);
8120 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8121 if (modifier
== EXPAND_STACK_PARM
)
8123 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8128 /* ??? Can optimize bitwise operations with one arg constant.
8129 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8130 and (a bitwise1 b) bitwise2 b (etc)
8131 but that is probably not worth while. */
8133 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8134 boolean values when we want in all cases to compute both of them. In
8135 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8136 as actual zero-or-1 values and then bitwise anding. In cases where
8137 there cannot be any side effects, better code would be made by
8138 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8139 how to recognize those cases. */
8141 case TRUTH_AND_EXPR
:
8143 this_optab
= and_optab
;
8148 this_optab
= ior_optab
;
8151 case TRUTH_XOR_EXPR
:
8153 this_optab
= xor_optab
;
8160 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8162 if (modifier
== EXPAND_STACK_PARM
)
8164 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8165 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
8168 /* Could determine the answer when only additive constants differ. Also,
8169 the addition of one can be handled by changing the condition. */
8176 case UNORDERED_EXPR
:
8183 temp
= do_store_flag (exp
,
8184 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8185 tmode
!= VOIDmode
? tmode
: mode
, 0);
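      /* do_store_flag returns 0 when no suitable set-flag (scc) sequence is
	 available; in that case we drop through to the jump-based code
	 shared with TRUTH_ANDIF_EXPR / TRUTH_ORIF_EXPR below.  */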
8189 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8190 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8192 && GET_CODE (original_target
) == REG
8193 && (GET_MODE (original_target
)
8194 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8196 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8199 /* If temp is constant, we can just compute the result. */
8200 if (GET_CODE (temp
) == CONST_INT
)
8202 if (INTVAL (temp
) != 0)
8203 emit_move_insn (target
, const1_rtx
);
8205 emit_move_insn (target
, const0_rtx
);
8210 if (temp
!= original_target
)
8212 enum machine_mode mode1
= GET_MODE (temp
);
8213 if (mode1
== VOIDmode
)
8214 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
8216 temp
= copy_to_mode_reg (mode1
, temp
);
8219 op1
= gen_label_rtx ();
8220 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8221 GET_MODE (temp
), unsignedp
, op1
);
8222 emit_move_insn (temp
, const1_rtx
);
8227 /* If no set-flag instruction, must generate a conditional
8228 store into a temporary variable. Drop through
8229 and handle this like && and ||. */
8231 case TRUTH_ANDIF_EXPR
:
8232 case TRUTH_ORIF_EXPR
:
8235 || modifier
== EXPAND_STACK_PARM
8236 || ! safe_from_p (target
, exp
, 1)
8237 /* Make sure we don't have a hard reg (such as function's return
8238 value) live across basic blocks, if not optimizing. */
8239 || (!optimize
&& GET_CODE (target
) == REG
8240 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8241 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8244 emit_clr_insn (target
);
8246 op1
= gen_label_rtx ();
8247 jumpifnot (exp
, op1
);
8250 emit_0_to_1_insn (target
);
8253 return ignore
? const0_rtx
: target
;
8255 case TRUTH_NOT_EXPR
:
8256 if (modifier
== EXPAND_STACK_PARM
)
8258 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8259 /* The parser is careful to generate TRUTH_NOT_EXPR
8260 only with operands that are always zero or one. */
8261 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8262 target
, 1, OPTAB_LIB_WIDEN
);
8268 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8270 return expand_expr (TREE_OPERAND (exp
, 1),
8271 (ignore
? const0_rtx
: target
),
8272 VOIDmode
, modifier
);
8275 /* If we would have a "singleton" (see below) were it not for a
8276 conversion in each arm, bring that conversion back out. */
8277 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8278 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8279 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8280 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8282 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8283 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8285 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8286 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8287 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8288 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8289 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8290 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8291 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8292 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8293 return expand_expr (build1 (NOP_EXPR
, type
,
8294 build (COND_EXPR
, TREE_TYPE (iftrue
),
8295 TREE_OPERAND (exp
, 0),
8297 target
, tmode
, modifier
);
8301 /* Note that COND_EXPRs whose type is a structure or union
8302 are required to be constructed to contain assignments of
8303 a temporary variable, so that we can evaluate them here
8304 for side effect only. If type is void, we must do likewise. */
8306 /* If an arm of the branch requires a cleanup,
8307 only that cleanup is performed. */
8310 tree binary_op
= 0, unary_op
= 0;
8312 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8313 convert it to our mode, if necessary. */
8314 if (integer_onep (TREE_OPERAND (exp
, 1))
8315 && integer_zerop (TREE_OPERAND (exp
, 2))
8316 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8320 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8325 if (modifier
== EXPAND_STACK_PARM
)
8327 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8328 if (GET_MODE (op0
) == mode
)
8332 target
= gen_reg_rtx (mode
);
8333 convert_move (target
, op0
, unsignedp
);
8337 /* Check for X ? A + B : A. If we have this, we can copy A to the
8338 output and conditionally add B. Similarly for unary operations.
8339 Don't do this if X has side-effects because those side effects
8340 might affect A or B and the "?" operation is a sequence point in
8341 ANSI. (operand_equal_p tests for side effects.) */
8343 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8344 && operand_equal_p (TREE_OPERAND (exp
, 2),
8345 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8346 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8347 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8348 && operand_equal_p (TREE_OPERAND (exp
, 1),
8349 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8350 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8351 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8352 && operand_equal_p (TREE_OPERAND (exp
, 2),
8353 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8354 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8355 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8356 && operand_equal_p (TREE_OPERAND (exp
, 1),
8357 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8358 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8360 /* If we are not to produce a result, we have no target. Otherwise,
8361 if a target was specified use it; it will not be used as an
8362 intermediate target unless it is safe. If no target, use a
8367 else if (modifier
== EXPAND_STACK_PARM
)
8368 temp
= assign_temp (type
, 0, 0, 1);
8369 else if (original_target
8370 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8371 || (singleton
&& GET_CODE (original_target
) == REG
8372 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8373 && original_target
== var_rtx (singleton
)))
8374 && GET_MODE (original_target
) == mode
8375 #ifdef HAVE_conditional_move
8376 && (! can_conditionally_move_p (mode
)
8377 || GET_CODE (original_target
) == REG
8378 || TREE_ADDRESSABLE (type
))
8380 && (GET_CODE (original_target
) != MEM
8381 || TREE_ADDRESSABLE (type
)))
8382 temp
= original_target
;
8383 else if (TREE_ADDRESSABLE (type
))
8386 temp
= assign_temp (type
, 0, 0, 1);
8388 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8389 do the test of X as a store-flag operation, do this as
8390 A + ((X != 0) << log C). Similarly for other simple binary
8391 operators. Only do for C == 1 if BRANCH_COST is low. */
8392 if (temp
&& singleton
&& binary_op
8393 && (TREE_CODE (binary_op
) == PLUS_EXPR
8394 || TREE_CODE (binary_op
) == MINUS_EXPR
8395 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8396 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8397 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8398 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8399 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8403 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8404 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8405 ? addv_optab
: add_optab
)
8406 : TREE_CODE (binary_op
) == MINUS_EXPR
8407 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8408 ? subv_optab
: sub_optab
)
8409 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8412 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8413 if (singleton
== TREE_OPERAND (exp
, 1))
8414 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8416 cond
= TREE_OPERAND (exp
, 0);
8418 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8420 mode
, BRANCH_COST
<= 1);
8422 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8423 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8424 build_int_2 (tree_log2
8428 (safe_from_p (temp
, singleton
, 1)
8429 ? temp
: NULL_RTX
), 0);
8433 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8434 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8435 unsignedp
, OPTAB_LIB_WIDEN
);
8439 do_pending_stack_adjust ();
8441 op0
= gen_label_rtx ();
8443 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8447 /* If the target conflicts with the other operand of the
8448 binary op, we can't use it. Also, we can't use the target
8449 if it is a hard register, because evaluating the condition
8450 might clobber it. */
8452 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8453 || (GET_CODE (temp
) == REG
8454 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8455 temp
= gen_reg_rtx (mode
);
8456 store_expr (singleton
, temp
,
8457 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8460 expand_expr (singleton
,
8461 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8462 if (singleton
== TREE_OPERAND (exp
, 1))
8463 jumpif (TREE_OPERAND (exp
, 0), op0
);
8465 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8467 start_cleanup_deferral ();
8468 if (binary_op
&& temp
== 0)
8469 /* Just touch the other operand. */
8470 expand_expr (TREE_OPERAND (binary_op
, 1),
8471 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8473 store_expr (build (TREE_CODE (binary_op
), type
,
8474 make_tree (type
, temp
),
8475 TREE_OPERAND (binary_op
, 1)),
8476 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8478 store_expr (build1 (TREE_CODE (unary_op
), type
,
8479 make_tree (type
, temp
)),
8480 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8483 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8484 comparison operator. If we have one of these cases, set the
8485 output to A, branch on A (cse will merge these two references),
8486 then set the output to FOO. */
8488 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8489 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8490 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8491 TREE_OPERAND (exp
, 1), 0)
8492 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8493 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8494 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8496 if (GET_CODE (temp
) == REG
8497 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8498 temp
= gen_reg_rtx (mode
);
8499 store_expr (TREE_OPERAND (exp
, 1), temp
,
8500 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8501 jumpif (TREE_OPERAND (exp
, 0), op0
);
8503 start_cleanup_deferral ();
8504 if (TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8505 store_expr (TREE_OPERAND (exp
, 2), temp
,
8506 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8508 expand_expr (TREE_OPERAND (exp
, 2),
8509 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8513 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8514 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8515 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8516 TREE_OPERAND (exp
, 2), 0)
8517 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8518 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8519 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8521 if (GET_CODE (temp
) == REG
8522 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8523 temp
= gen_reg_rtx (mode
);
8524 store_expr (TREE_OPERAND (exp
, 2), temp
,
8525 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8526 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8528 start_cleanup_deferral ();
8529 if (TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8530 store_expr (TREE_OPERAND (exp
, 1), temp
,
8531 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8533 expand_expr (TREE_OPERAND (exp
, 1),
8534 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8539 op1
= gen_label_rtx ();
8540 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8542 start_cleanup_deferral ();
8544 /* One branch of the cond can be void, if it never returns. For
8545 example A ? throw : E */
8547 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8548 store_expr (TREE_OPERAND (exp
, 1), temp
,
8549 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8551 expand_expr (TREE_OPERAND (exp
, 1),
8552 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8553 end_cleanup_deferral ();
8555 emit_jump_insn (gen_jump (op1
));
8558 start_cleanup_deferral ();
8560 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8561 store_expr (TREE_OPERAND (exp
, 2), temp
,
8562 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8564 expand_expr (TREE_OPERAND (exp
, 2),
8565 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8568 end_cleanup_deferral ();
8579 /* Something needs to be initialized, but we didn't know
8580 where that thing was when building the tree. For example,
8581 it could be the return value of a function, or a parameter
8582 to a function which lays down in the stack, or a temporary
8583 variable which must be passed by reference.
8585 We guarantee that the expression will either be constructed
8586 or copied into our original target. */
8588 tree slot
= TREE_OPERAND (exp
, 0);
8589 tree cleanups
= NULL_TREE
;
8592 if (TREE_CODE (slot
) != VAR_DECL
)
8596 target
= original_target
;
8598 /* Set this here so that if we get a target that refers to a
8599 register variable that's already been used, put_reg_into_stack
8600 knows that it should fix up those uses. */
8601 TREE_USED (slot
) = 1;
8605 if (DECL_RTL_SET_P (slot
))
8607 target
= DECL_RTL (slot
);
8608 /* If we have already expanded the slot, so don't do
8610 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8615 target
= assign_temp (type
, 2, 0, 1);
8616 /* All temp slots at this level must not conflict. */
8617 preserve_temp_slots (target
);
8618 SET_DECL_RTL (slot
, target
);
8619 if (TREE_ADDRESSABLE (slot
))
8620 put_var_into_stack (slot
, /*rescan=*/false);
8622 /* Since SLOT is not known to the called function
8623 to belong to its stack frame, we must build an explicit
8624 cleanup. This case occurs when we must build up a reference
8625 to pass the reference as an argument. In this case,
8626 it is very likely that such a reference need not be
8629 if (TREE_OPERAND (exp
, 2) == 0)
8630 TREE_OPERAND (exp
, 2)
8631 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
8632 cleanups
= TREE_OPERAND (exp
, 2);
8637 /* This case does occur, when expanding a parameter which
8638 needs to be constructed on the stack. The target
8639 is the actual stack address that we want to initialize.
8640 The function we call will perform the cleanup in this case. */
8642 /* If we have already assigned it space, use that space,
8643 not target that we were passed in, as our target
8644 parameter is only a hint. */
8645 if (DECL_RTL_SET_P (slot
))
8647 target
= DECL_RTL (slot
);
8648 /* If we have already expanded the slot, so don't do
8650 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8655 SET_DECL_RTL (slot
, target
);
8656 /* If we must have an addressable slot, then make sure that
8657 the RTL that we just stored in slot is OK. */
8658 if (TREE_ADDRESSABLE (slot
))
8659 put_var_into_stack (slot
, /*rescan=*/true);
8663 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8664 /* Mark it as expanded. */
8665 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8667 store_expr (exp1
, target
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8669 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8676 tree lhs
= TREE_OPERAND (exp
, 0);
8677 tree rhs
= TREE_OPERAND (exp
, 1);
8679 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8685 /* If lhs is complex, expand calls in rhs before computing it.
8686 That's so we don't compute a pointer and save it over a
8687 call. If lhs is simple, compute it first so we can give it
8688 as a target if the rhs is just a call. This avoids an
8689 extra temp and copy and that prevents a partial-subsumption
8690 which makes bad code. Actually we could treat
8691 component_ref's of vars like vars. */
8693 tree lhs
= TREE_OPERAND (exp
, 0);
8694 tree rhs
= TREE_OPERAND (exp
, 1);
8698 /* Check for |= or &= of a bitfield of size one into another bitfield
8699 of size 1. In this case, (unless we need the result of the
8700 assignment) we can do this more efficiently with a
8701 test followed by an assignment, if necessary.
8703 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8704 things change so we do, this code should be enhanced to
8707 && TREE_CODE (lhs
) == COMPONENT_REF
8708 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8709 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8710 && TREE_OPERAND (rhs
, 0) == lhs
8711 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8712 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8713 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8715 rtx label
= gen_label_rtx ();
8717 do_jump (TREE_OPERAND (rhs
, 1),
8718 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8719 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8720 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8721 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8723 : integer_zero_node
)),
8725 do_pending_stack_adjust ();
8730 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8736 if (!TREE_OPERAND (exp
, 0))
8737 expand_null_return ();
8739 expand_return (TREE_OPERAND (exp
, 0));
8742 case PREINCREMENT_EXPR
:
8743 case PREDECREMENT_EXPR
:
8744 return expand_increment (exp
, 0, ignore
);
8746 case POSTINCREMENT_EXPR
:
8747 case POSTDECREMENT_EXPR
:
8748 /* Faster to treat as pre-increment if result is not used. */
8749 return expand_increment (exp
, ! ignore
, ignore
);
8752 if (modifier
== EXPAND_STACK_PARM
)
8754 /* Are we taking the address of a nested function? */
8755 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8756 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8757 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8758 && ! TREE_STATIC (exp
))
8760 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8761 op0
= force_operand (op0
, target
);
8763 /* If we are taking the address of something erroneous, just
8765 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8767 /* If we are taking the address of a constant and are at the
8768 top level, we have to use output_constant_def since we can't
8769 call force_const_mem at top level. */
8771 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8772 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8774 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8777 /* We make sure to pass const0_rtx down if we came in with
8778 ignore set, to avoid doing the cleanups twice for something. */
8779 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8780 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8781 (modifier
== EXPAND_INITIALIZER
8782 ? modifier
: EXPAND_CONST_ADDRESS
));
8784 /* If we are going to ignore the result, OP0 will have been set
8785 to const0_rtx, so just return it. Don't get confused and
8786 think we are taking the address of the constant. */
8790 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8791 clever and returns a REG when given a MEM. */
8792 op0
= protect_from_queue (op0
, 1);
8794 /* We would like the object in memory. If it is a constant, we can
8795 have it be statically allocated into memory. For a non-constant,
8796 we need to allocate some memory and store the value into it. */
8798 if (CONSTANT_P (op0
))
8799 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8801 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8802 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8803 || GET_CODE (op0
) == PARALLEL
|| GET_CODE (op0
) == LO_SUM
)
8805 /* If the operand is a SAVE_EXPR, we can deal with this by
8806 forcing the SAVE_EXPR into memory. */
8807 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
8809 put_var_into_stack (TREE_OPERAND (exp
, 0),
8811 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
8815 /* If this object is in a register, it can't be BLKmode. */
8816 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8817 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8819 if (GET_CODE (op0
) == PARALLEL
)
8820 /* Handle calls that pass values in multiple
8821 non-contiguous locations. The Irix 6 ABI has examples
8823 emit_group_store (memloc
, op0
, inner_type
,
8824 int_size_in_bytes (inner_type
));
8826 emit_move_insn (memloc
, op0
);
8832 if (GET_CODE (op0
) != MEM
)
8835 mark_temp_addr_taken (op0
);
8836 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8838 op0
= XEXP (op0
, 0);
8839 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
8840 op0
= convert_memory_address (ptr_mode
, op0
);
8844 /* If OP0 is not aligned as least as much as the type requires, we
8845 need to make a temporary, copy OP0 to it, and take the address of
8846 the temporary. We want to use the alignment of the type, not of
8847 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8848 the test for BLKmode means that can't happen. The test for
8849 BLKmode is because we never make mis-aligned MEMs with
8852 We don't need to do this at all if the machine doesn't have
8853 strict alignment. */
8854 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
8855 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
8857 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
8859 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8862 if (TYPE_ALIGN_OK (inner_type
))
8865 if (TREE_ADDRESSABLE (inner_type
))
8867 /* We can't make a bitwise copy of this object, so fail. */
8868 error ("cannot take the address of an unaligned member");
8872 new = assign_stack_temp_for_type
8873 (TYPE_MODE (inner_type
),
8874 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
8875 : int_size_in_bytes (inner_type
),
8876 1, build_qualified_type (inner_type
,
8877 (TYPE_QUALS (inner_type
)
8878 | TYPE_QUAL_CONST
)));
8880 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
8881 (modifier
== EXPAND_STACK_PARM
8882 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
8887 op0
= force_operand (XEXP (op0
, 0), target
);
8891 && GET_CODE (op0
) != REG
8892 && modifier
!= EXPAND_CONST_ADDRESS
8893 && modifier
!= EXPAND_INITIALIZER
8894 && modifier
!= EXPAND_SUM
)
8895 op0
= force_reg (Pmode
, op0
);
8897 if (GET_CODE (op0
) == REG
8898 && ! REG_USERVAR_P (op0
))
8899 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8901 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
8902 op0
= convert_memory_address (ptr_mode
, op0
);
8906 case ENTRY_VALUE_EXPR
:
8909 /* COMPLEX type for Extended Pascal & Fortran */
8912 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8915 /* Get the rtx code of the operands. */
8916 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8917 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8920 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8924 /* Move the real (op0) and imaginary (op1) parts to their location. */
8925 emit_move_insn (gen_realpart (mode
, target
), op0
);
8926 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8928 insns
= get_insns ();
8931 /* Complex construction should appear as a single unit. */
8932 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8933 each with a separate pseudo as destination.
8934 It's not correct for flow to treat them as a unit. */
8935 if (GET_CODE (target
) != CONCAT
)
8936 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8944 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8945 return gen_realpart (mode
, op0
);
8948 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8949 return gen_imagpart (mode
, op0
);
8953 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8957 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8960 target
= gen_reg_rtx (mode
);
8964 /* Store the realpart and the negated imagpart to target. */
8965 emit_move_insn (gen_realpart (partmode
, target
),
8966 gen_realpart (partmode
, op0
));
8968 imag_t
= gen_imagpart (partmode
, target
);
8969 temp
= expand_unop (partmode
,
8970 ! unsignedp
&& flag_trapv
8971 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8972 ? negv_optab
: neg_optab
,
8973 gen_imagpart (partmode
, op0
), imag_t
, 0);
8975 emit_move_insn (imag_t
, temp
);
8977 insns
= get_insns ();
8980 /* Conjugate should appear as a single unit
8981 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8982 each with a separate pseudo as destination.
8983 It's not correct for flow to treat them as a unit. */
8984 if (GET_CODE (target
) != CONCAT
)
8985 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8992 case TRY_CATCH_EXPR
:
8994 tree handler
= TREE_OPERAND (exp
, 1);
8996 expand_eh_region_start ();
8998 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9000 expand_eh_region_end_cleanup (handler
);
9005 case TRY_FINALLY_EXPR
:
9007 tree try_block
= TREE_OPERAND (exp
, 0);
9008 tree finally_block
= TREE_OPERAND (exp
, 1);
9010 if (!optimize
|| unsafe_for_reeval (finally_block
) > 1)
9012 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9013 is not sufficient, so we cannot expand the block twice.
9014 So we play games with GOTO_SUBROUTINE_EXPR to let us
9015 expand the thing only once. */
9016 /* When not optimizing, we go ahead with this form since
9017 (1) user breakpoints operate more predictably without
9018 code duplication, and
9019 (2) we're not running any of the global optimizers
9020 that would explode in time/space with the highly
9021 connected CFG created by the indirect branching. */
9023 rtx finally_label
= gen_label_rtx ();
9024 rtx done_label
= gen_label_rtx ();
9025 rtx return_link
= gen_reg_rtx (Pmode
);
9026 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
9027 (tree
) finally_label
, (tree
) return_link
);
9028 TREE_SIDE_EFFECTS (cleanup
) = 1;
9030 /* Start a new binding layer that will keep track of all cleanup
9031 actions to be performed. */
9032 expand_start_bindings (2);
9033 target_temp_slot_level
= temp_slot_level
;
9035 expand_decl_cleanup (NULL_TREE
, cleanup
);
9036 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9038 preserve_temp_slots (op0
);
9039 expand_end_bindings (NULL_TREE
, 0, 0);
9040 emit_jump (done_label
);
9041 emit_label (finally_label
);
9042 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
9043 emit_indirect_jump (return_link
);
9044 emit_label (done_label
);
9048 expand_start_bindings (2);
9049 target_temp_slot_level
= temp_slot_level
;
9051 expand_decl_cleanup (NULL_TREE
, finally_block
);
9052 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9054 preserve_temp_slots (op0
);
9055 expand_end_bindings (NULL_TREE
, 0, 0);
9061 case GOTO_SUBROUTINE_EXPR
:
9063 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9064 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9065 rtx return_address
= gen_label_rtx ();
9066 emit_move_insn (return_link
,
9067 gen_rtx_LABEL_REF (Pmode
, return_address
));
9069 emit_label (return_address
);
9074 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9077 return get_exception_pointer (cfun
);
9080 /* Function descriptors are not valid except for as
9081 initialization constants, and should not be expanded. */
9085 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR
	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
	  && (TREE_OPERAND (offset, 0) == exp
	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
		      == TREE_TYPE (exp)))));
}
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

static tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
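/* Note that only the two shapes handled above -- &"str" and a PLUS_EXPR of
   &"str" with an offset (in either operand order) -- are recognized; any
   other form yields 0.  */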
9192 /* Expand code for a post- or pre- increment or decrement
9193 and return the RTX for the result.
9194 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9197 expand_increment (tree exp
, int post
, int ignore
)
9201 tree incremented
= TREE_OPERAND (exp
, 0);
9202 optab this_optab
= add_optab
;
9204 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9205 int op0_is_copy
= 0;
9206 int single_insn
= 0;
9207 /* 1 means we can't store into OP0 directly,
9208 because it is a subreg narrower than a word,
9209 and we don't dare clobber the rest of the word. */
9212 /* Stabilize any component ref that might need to be
9213 evaluated more than once below. */
9215 || TREE_CODE (incremented
) == BIT_FIELD_REF
9216 || (TREE_CODE (incremented
) == COMPONENT_REF
9217 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9218 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9219 incremented
= stabilize_reference (incremented
);
9220 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9221 ones into save exprs so that they don't accidentally get evaluated
9222 more than once by the code below. */
9223 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9224 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9225 incremented
= save_expr (incremented
);
9227 /* Compute the operands as RTX.
9228 Note whether OP0 is the actual lvalue or a copy of it:
9229 I believe it is a copy iff it is a register or subreg
9230 and insns were generated in computing it. */
9232 temp
= get_last_insn ();
9233 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9235 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9236 in place but instead must do sign- or zero-extension during assignment,
9237 so we copy it into a new register and let the code below use it as
9240 Note that we can safely modify this SUBREG since it is know not to be
9241 shared (it was made by the expand_expr call above). */
9243 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9246 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9250 else if (GET_CODE (op0
) == SUBREG
9251 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9253 /* We cannot increment this SUBREG in place. If we are
9254 post-incrementing, get a copy of the old value. Otherwise,
9255 just mark that we cannot increment in place. */
9257 op0
= copy_to_reg (op0
);
9262 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9263 && temp
!= get_last_insn ());
9264 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9266 /* Decide whether incrementing or decrementing. */
9267 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9268 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9269 this_optab
= sub_optab
;
9271 /* Convert decrement by a constant into a negative increment. */
9272 if (this_optab
== sub_optab
9273 && GET_CODE (op1
) == CONST_INT
)
9275 op1
= GEN_INT (-INTVAL (op1
));
9276 this_optab
= add_optab
;
  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;
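  /* TYPE_TRAP_SIGNED is only set for signed integer types under -ftrapv;
     switching to the trapping optab makes overflow in the increment trap
     rather than wrap.  */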
9282 /* For a preincrement, see if we can do this with a single instruction. */
9285 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9286 if (icode
!= (int) CODE_FOR_nothing
9287 /* Make sure that OP0 is valid for operands 0 and 1
9288 of the insn we want to queue. */
9289 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9290 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9291 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9295 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9296 then we cannot just increment OP0. We must therefore contrive to
9297 increment the original value. Then, for postincrement, we can return
9298 OP0 since it is a copy of the old value. For preincrement, expand here
9299 unless we can do it with a single insn.
9301 Likewise if storing directly into OP0 would clobber high bits
9302 we need to preserve (bad_subreg). */
9303 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9305 /* This is the easiest way to increment the value wherever it is.
9306 Problems with multiple evaluation of INCREMENTED are prevented
9307 because either (1) it is a component_ref or preincrement,
9308 in which case it was stabilized above, or (2) it is an array_ref
9309 with constant index in an array in a register, which is
9310 safe to reevaluate. */
9311 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9312 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9313 ? MINUS_EXPR
: PLUS_EXPR
),
9316 TREE_OPERAND (exp
, 1));
9318 while (TREE_CODE (incremented
) == NOP_EXPR
9319 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9321 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9322 incremented
= TREE_OPERAND (incremented
, 0);
9325 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
);
9326 return post
? op0
: temp
;
9331 /* We have a true reference to the value in OP0.
9332 If there is an insn to add or subtract in this mode, queue it.
9333 Queueing the increment insn avoids the register shuffling
9334 that often results if we must increment now and first save
9335 the old value for subsequent use. */
9337 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9338 op0
= stabilize (op0
);
9341 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9342 if (icode
!= (int) CODE_FOR_nothing
9343 /* Make sure that OP0 is valid for operands 0 and 1
9344 of the insn we want to queue. */
9345 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9346 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9348 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9349 op1
= force_reg (mode
, op1
);
9351 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9353 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9355 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9356 ? force_reg (Pmode
, XEXP (op0
, 0))
9357 : copy_to_reg (XEXP (op0
, 0)));
9360 op0
= replace_equiv_address (op0
, addr
);
9361 temp
= force_reg (GET_MODE (op0
), op0
);
9362 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9363 op1
= force_reg (mode
, op1
);
9365 /* The increment queue is LIFO, thus we have to `queue'
9366 the instructions in reverse order. */
9367 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9368 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9373 /* Preincrement, or we can't increment with one simple insn. */
9375 /* Save a copy of the value before inc or dec, to return it later. */
9376 temp
= value
= copy_to_reg (op0
);
9378 /* Arrange to return the incremented value. */
9379 /* Copy the rtx because expand_binop will protect from the queue,
9380 and the results of that would be invalid for us to return
9381 if our caller does emit_queue before using our result. */
9382 temp
= copy_rtx (value
= op0
);
9384 /* Increment however we can. */
9385 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9386 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9388 /* Make sure the value is stored into OP0. */
9390 emit_move_insn (op0
, op1
);
9395 /* Generate code to calculate EXP using a store-flag instruction
9396 and return an rtx for the result. EXP is either a comparison
9397 or a TRUTH_NOT_EXPR whose operand is a comparison.
9399 If TARGET is nonzero, store the result there if convenient.
9401 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9404 Return zero if there is no suitable set-flag instruction
9405 available on this machine.
9407 Once expand_expr has been called on the arguments of the comparison,
9408 we are committed to doing the store flag, since it is not safe to
9409 re-evaluate the expression. We emit the store-flag insn by calling
9410 emit_store_flag, but only expand the arguments if we have a reason
9411 to believe that emit_store_flag will be successful. If we think that
9412 it will, but it isn't, we have to simulate the store-flag with a
9413 set/jump/set sequence. */
9416 do_store_flag (tree exp
, rtx target
, enum machine_mode mode
, int only_cheap
)
9419 tree arg0
, arg1
, type
;
9421 enum machine_mode operand_mode
;
9425 enum insn_code icode
;
9426 rtx subtarget
= target
;
9429 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9430 result at the end. We can't simply invert the test since it would
9431 have already been inverted if it were valid. This case occurs for
9432 some floating-point comparisons. */
9434 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
9435 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
9437 arg0
= TREE_OPERAND (exp
, 0);
9438 arg1
= TREE_OPERAND (exp
, 1);
9440 /* Don't crash if the comparison was erroneous. */
9441 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
9444 type
= TREE_TYPE (arg0
);
9445 operand_mode
= TYPE_MODE (type
);
9446 unsignedp
= TREE_UNSIGNED (type
);
9448 /* We won't bother with BLKmode store-flag operations because it would mean
9449 passing a lot of information to emit_store_flag. */
9450 if (operand_mode
== BLKmode
)
9453 /* We won't bother with store-flag operations involving function pointers
9454 when function pointers must be canonicalized before comparisons. */
9455 #ifdef HAVE_canonicalize_funcptr_for_compare
9456 if (HAVE_canonicalize_funcptr_for_compare
9457 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9458 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9460 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9461 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9462 == FUNCTION_TYPE
))))
9469 /* Get the rtx comparison code to use. We know that EXP is a comparison
9470 operation of some type. Some comparisons against 1 and -1 can be
9471 converted to comparisons with zero. Do so here so that the tests
9472 below will be aware that we have a comparison with zero. These
9473 tests will not catch constants in the first operand, but constants
9474 are rarely passed as the first operand. */
9476 switch (TREE_CODE (exp
))
9485 if (integer_onep (arg1
))
9486 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
9488 code
= unsignedp
? LTU
: LT
;
9491 if (! unsignedp
&& integer_all_onesp (arg1
))
9492 arg1
= integer_zero_node
, code
= LT
;
9494 code
= unsignedp
? LEU
: LE
;
9497 if (! unsignedp
&& integer_all_onesp (arg1
))
9498 arg1
= integer_zero_node
, code
= GE
;
9500 code
= unsignedp
? GTU
: GT
;
9503 if (integer_onep (arg1
))
9504 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
9506 code
= unsignedp
? GEU
: GE
;
9509 case UNORDERED_EXPR
:
9535 /* Put a constant second. */
9536 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
9538 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
9539 code
= swap_condition (code
);
9542 /* If this is an equality or inequality test of a single bit, we can
9543 do this by shifting the bit being tested to the low-order bit and
9544 masking the result with the constant 1. If the condition was EQ,
9545 we xor it with 1. This does not require an scc insn and is faster
9546 than an scc insn even if we have it.
9548 The code to make this transformation was moved into fold_single_bit_test,
9549 so we just call into the folder and expand its result. */
9551 if ((code
== NE
|| code
== EQ
)
9552 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
9553 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
9555 tree type
= (*lang_hooks
.types
.type_for_mode
) (mode
, unsignedp
);
9556 return expand_expr (fold_single_bit_test (code
== NE
? NE_EXPR
: EQ_EXPR
,
9558 target
, VOIDmode
, EXPAND_NORMAL
);
9561 /* Now see if we are likely to be able to do this. Return if not. */
9562 if (! can_compare_p (code
, operand_mode
, ccp_store_flag
))
9565 icode
= setcc_gen_code
[(int) code
];
9566 if (icode
== CODE_FOR_nothing
9567 || (only_cheap
&& insn_data
[(int) icode
].operand
[0].mode
!= mode
))
9569 /* We can only do this if it is one of the special cases that
9570 can be handled without an scc insn. */
9571 if ((code
== LT
&& integer_zerop (arg1
))
9572 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
9574 else if (BRANCH_COST
>= 0
9575 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
9576 && TREE_CODE (type
) != REAL_TYPE
9577 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
9578 != CODE_FOR_nothing
)
9579 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
9580 != CODE_FOR_nothing
)))
9586 if (! get_subtarget (target
)
9587 || GET_MODE (subtarget
) != operand_mode
)
9590 expand_operands (arg0
, arg1
, subtarget
, &op0
, &op1
, 0);
9593 target
= gen_reg_rtx (mode
);
9595 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9596 because, if the emit_store_flag does anything it will succeed and
9597 OP0 and OP1 will not be used subsequently. */
9599 result
= emit_store_flag (target
, code
,
9600 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
9601 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
9602 operand_mode
, unsignedp
, 1);
9607 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
9608 result
, 0, OPTAB_LIB_WIDEN
);
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);
9625 /* The code of RESULT may not match CODE if compare_from_rtx
9626 decided to swap its operands and reverse the original code.
9628 We know that compare_from_rtx returns either a CONST_INT or
9629 a new comparison code, so it is safe to just extract the
9630 code from RESULT. */
9631 code
= GET_CODE (result
);
9633 label
= gen_label_rtx ();
9634 if (bcc_gen_fctn
[(int) code
] == 0)
9637 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
9638 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
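/* That is, by default a dispatch table is only considered for a switch
   with at least 4 case values when a casesi pattern exists, or at least
   5 otherwise.  */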
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
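  /* Either way INDEX is now an SImode value: a wider index (DImode, say)
     had MINVAL subtracted and was bounds-checked against RANGE in its
     original mode above, so truncating it to SImode was safe.  */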
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
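  /* In effect the test is "(unsigned) (i - LOW) > (unsigned) (HIGH - LOW)":
     an original index below LOW wraps around to a large unsigned value, so
     the single GTU comparison also catches values above HIGH.  */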
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"