/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
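
/* For instance, emit_block_move (below) consults this cutoff roughly as
   follows -- a minimal sketch, assuming SIZE is a CONST_INT and ALIGN is
   the shared alignment of the two blocks:

     if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);
     else
       ... fall back to a movstr pattern or a memcpy libcall ...  */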
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;

      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif
410 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
411 use of autoincrement. Make a copy of the contents of the memory
412 location rather than a copy of the address, but not if the value is
413 of mode BLKmode. Don't modify X in place since it might be
415 if (code
== MEM
&& GET_MODE (x
) != BLKmode
416 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
419 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
423 rtx temp
= gen_reg_rtx (GET_MODE (x
));
425 emit_insn_before (gen_move_insn (temp
, new),
430 /* Copy the address into a pseudo, so that the returned value
431 remains correct across calls to emit_queue. */
432 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
435 /* Otherwise, recursively protect the subexpressions of all
436 the kinds of rtx's that can contain a QUEUED. */
439 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
440 if (tem
!= XEXP (x
, 0))
446 else if (code
== PLUS
|| code
== MULT
)
448 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
449 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
450 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
459 /* If the increment has not happened, use the variable itself. Copy it
460 into a new pseudo so that the value remains correct across calls to
462 if (QUEUED_INSN (x
) == 0)
463 return copy_to_reg (QUEUED_VAR (x
));
464 /* If the increment has happened and a pre-increment copy exists,
466 if (QUEUED_COPY (x
) != 0)
467 return QUEUED_COPY (x
);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
473 return QUEUED_COPY (x
);
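
/* A minimal usage sketch (hypothetical operands OP0 and OP1, where OP0 is
   only read and OP1 is about to be stored into):

     op0 = protect_from_queue (op0, 0);
     op1 = protect_from_queue (op1, 1);
     emit_insn (gen_move_insn (op1, op0));

   The protected values must be consumed before the next emit_queue;
   holding them across a queue flush produces incorrect code, as the
   comment before protect_from_queue warns.  */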
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
490 return queued_subexp_p (XEXP (x
, 0));
494 return (queued_subexp_p (XEXP (x
, 0))
495 || queued_subexp_p (XEXP (x
, 1)));
501 /* Perform all the pending incrementations. */
507 while ((p
= pending_chain
))
509 rtx body
= QUEUED_BODY (p
);
511 switch (GET_CODE (body
))
519 QUEUED_INSN (p
) = body
;
523 #ifdef ENABLE_CHECKING
530 QUEUED_INSN (p
) = emit_insn (body
);
534 pending_chain
= QUEUED_NEXT (p
);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();
563 /* If FROM is a SUBREG that indicates that we have already done at least
564 the required extension, strip it. We don't handle such SUBREGs as
567 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
568 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
569 >= GET_MODE_SIZE (to_mode
))
570 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
571 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
573 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
576 if (to_mode
== from_mode
577 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
579 emit_move_insn (to
, from
);
583 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
585 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
588 if (VECTOR_MODE_P (to_mode
))
589 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
591 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
593 emit_move_insn (to
, from
);
597 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
599 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
600 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
609 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
611 else if (GET_MODE_PRECISION (from_mode
) > GET_MODE_PRECISION (to_mode
))
616 /* Try converting directly if the insn is supported. */
618 code
= tab
->handlers
[to_mode
][from_mode
].insn_code
;
619 if (code
!= CODE_FOR_nothing
)
621 emit_unop_insn (code
, to
, from
,
622 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
626 /* Otherwise use a libcall. */
627 libcall
= tab
->handlers
[to_mode
][from_mode
].libfunc
;
630 /* This conversion is not implemented yet. */
634 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
636 insns
= get_insns ();
638 emit_libcall_block (insns
, to
, value
,
639 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
641 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
645 /* Handle pointer conversion. */ /* SPEE 900220. */
646 /* Targets are expected to provide conversion insns between PxImode and
647 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
648 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
650 enum machine_mode full_mode
651 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
653 if (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
657 if (full_mode
!= from_mode
)
658 from
= convert_to_mode (full_mode
, from
, unsignedp
);
659 emit_unop_insn (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
,
663 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
665 enum machine_mode full_mode
666 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
668 if (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
672 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
674 if (to_mode
== full_mode
)
677 /* else proceed to integer conversions below */
678 from_mode
= full_mode
;
681 /* Now both modes are integers. */
683 /* Handle expanding beyond a word. */
684 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
685 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
692 enum machine_mode lowpart_mode
;
693 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
695 /* Try converting directly if the insn is supported. */
696 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
699 /* If FROM is a SUBREG, put it into a register. Do this
700 so that we always generate the same set of insns for
701 better cse'ing; if an intermediate assignment occurred,
702 we won't be doing the operation directly on the SUBREG. */
703 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
704 from
= force_reg (from_mode
, from
);
705 emit_unop_insn (code
, to
, from
, equiv_code
);
708 /* Next, try converting via full word. */
709 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
710 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
711 != CODE_FOR_nothing
))
713 if (GET_CODE (to
) == REG
)
714 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
715 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
716 emit_unop_insn (code
, to
,
717 gen_lowpart (word_mode
, to
), equiv_code
);
721 /* No special multiword conversion insn; do it by hand. */
724 /* Since we will turn this into a no conflict block, we must ensure
725 that the source does not overlap the target. */
727 if (reg_overlap_mentioned_p (to
, from
))
728 from
= force_reg (from_mode
, from
);
730 /* Get a copy of FROM widened to a word, if necessary. */
731 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
732 lowpart_mode
= word_mode
;
734 lowpart_mode
= from_mode
;
736 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
738 lowpart
= gen_lowpart (lowpart_mode
, to
);
739 emit_move_insn (lowpart
, lowfrom
);
741 /* Compute the value to put in each remaining word. */
743 fill_value
= const0_rtx
;
748 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
749 && STORE_FLAG_VALUE
== -1)
751 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
753 fill_value
= gen_reg_rtx (word_mode
);
754 emit_insn (gen_slt (fill_value
));
760 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
761 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
763 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
767 /* Fill the remaining words. */
768 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
770 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
771 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
776 if (fill_value
!= subword
)
777 emit_move_insn (subword
, fill_value
);
780 insns
= get_insns ();
783 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
784 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
788 /* Truncating multi-word to a word or less. */
789 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
790 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
792 if (!((GET_CODE (from
) == MEM
793 && ! MEM_VOLATILE_P (from
)
794 && direct_load
[(int) to_mode
]
795 && ! mode_dependent_address_p (XEXP (from
, 0)))
796 || GET_CODE (from
) == REG
797 || GET_CODE (from
) == SUBREG
))
798 from
= force_reg (from_mode
, from
);
799 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
803 /* Now follow all the conversions between integers
804 no more than a word long. */
806 /* For truncation, usually we can just refer to FROM in a narrower mode. */
807 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
808 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
809 GET_MODE_BITSIZE (from_mode
)))
811 if (!((GET_CODE (from
) == MEM
812 && ! MEM_VOLATILE_P (from
)
813 && direct_load
[(int) to_mode
]
814 && ! mode_dependent_address_p (XEXP (from
, 0)))
815 || GET_CODE (from
) == REG
816 || GET_CODE (from
) == SUBREG
))
817 from
= force_reg (from_mode
, from
);
818 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
819 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
820 from
= copy_to_reg (from
);
821 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
825 /* Handle extension. */
826 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
828 /* Convert directly if that works. */
829 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
833 from
= force_not_mem (from
);
835 emit_unop_insn (code
, to
, from
, equiv_code
);
840 enum machine_mode intermediate
;
844 /* Search for a mode to convert via. */
845 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
846 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
847 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
849 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
850 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
851 GET_MODE_BITSIZE (intermediate
))))
852 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
853 != CODE_FOR_nothing
))
855 convert_move (to
, convert_to_mode (intermediate
, from
,
856 unsignedp
), unsignedp
);
860 /* No suitable intermediate mode.
861 Generate what we need with shifts. */
862 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
863 - GET_MODE_BITSIZE (from_mode
), 0);
864 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
865 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
867 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
870 emit_move_insn (to
, tmp
);
875 /* Support special truncate insns for certain modes. */
876 if (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
!= CODE_FOR_nothing
)
878 emit_unop_insn (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
,
883 /* Handle truncation of volatile memrefs, and so on;
884 the things that couldn't be truncated directly,
885 and for which there was no special instruction.
887 ??? Code above formerly short-circuited this, for most integer
888 mode pairs, with a force_reg in from_mode followed by a recursive
889 call to this routine. Appears always to have been wrong. */
890 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
892 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
893 emit_move_insn (to
, temp
);
897 /* Mode combination is not recognized. */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
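
/* A minimal usage sketch, assuming FROM holds a value in some narrower
   integer mode that should be treated as unsigned:

     rtx widened = convert_to_mode (SImode, from, 1);

   Per the comment above, this either refers to a piece of FROM in place
   or emits a conversion into a fresh temporary.  */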
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
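
  /* A minimal worked example of the handling below, assuming a target whose
     QImode is 8 bits wide: converting the constant -1 from QImode to HImode
     with UNSIGNEDP nonzero zero-extends, so

       convert_modes (HImode, QImode, GEN_INT (-1), 1)

     yields (const_int 255) rather than (const_int -1).  */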
935 /* If FROM is a SUBREG that indicates that we have already done at least
936 the required extension, strip it. */
938 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
939 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
940 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
941 x
= gen_lowpart (mode
, x
);
943 if (GET_MODE (x
) != VOIDmode
)
944 oldmode
= GET_MODE (x
);
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
955 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
956 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
957 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
959 HOST_WIDE_INT val
= INTVAL (x
);
961 if (oldmode
!= VOIDmode
962 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
964 int width
= GET_MODE_BITSIZE (oldmode
);
966 /* We need to zero extend VAL. */
967 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
970 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
973 /* We can do this with a gen_lowpart if both desired and current modes
974 are integer, and this is either a constant integer, a register, or a
975 non-volatile MEM. Except for the constant case where MODE is no
976 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
978 if ((GET_CODE (x
) == CONST_INT
979 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
980 || (GET_MODE_CLASS (mode
) == MODE_INT
981 && GET_MODE_CLASS (oldmode
) == MODE_INT
982 && (GET_CODE (x
) == CONST_DOUBLE
983 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
984 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
985 && direct_load
[(int) mode
])
986 || (GET_CODE (x
) == REG
987 && (! HARD_REGISTER_P (x
)
988 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
989 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
990 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
992 /* ?? If we don't know OLDMODE, we have to assume here that
993 X does not need sign- or zero-extension. This may not be
994 the case, but it's the best we can do. */
995 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
996 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
998 HOST_WIDE_INT val
= INTVAL (x
);
999 int width
= GET_MODE_BITSIZE (oldmode
);
1001 /* We must sign or zero-extend in this case. Start by
1002 zero-extending, then sign extend if we need to. */
1003 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1005 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1006 val
|= (HOST_WIDE_INT
) (-1) << width
;
1008 return gen_int_mode (val
, mode
);
1011 return gen_lowpart (mode
, x
);
1014 /* Converting from integer constant into mode is always equivalent to an
1015 subreg operation. */
1016 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
1018 if (GET_MODE_BITSIZE (mode
) != GET_MODE_BITSIZE (oldmode
))
1020 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
1023 temp
= gen_reg_rtx (mode
);
1024 convert_move (temp
, x
, unsignedp
);
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;
1069 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
1072 data
.from_addr
= from_addr
;
1075 to_addr
= XEXP (to
, 0);
1078 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1079 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1081 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1088 #ifdef STACK_GROWS_DOWNWARD
1094 data
.to_addr
= to_addr
;
1097 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1098 || GET_CODE (from_addr
) == POST_INC
1099 || GET_CODE (from_addr
) == POST_DEC
);
1101 data
.explicit_inc_from
= 0;
1102 data
.explicit_inc_to
= 0;
1103 if (data
.reverse
) data
.offset
= len
;
1106 /* If copying requires more than two move insns,
1107 copy addresses to registers (to make displacements shorter)
1108 and use post-increment if available. */
1109 if (!(data
.autinc_from
&& data
.autinc_to
)
1110 && move_by_pieces_ninsns (len
, align
) > 2)
1112 /* Find the mode of the largest move... */
1113 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1114 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1115 if (GET_MODE_SIZE (tmode
) < max_size
)
1118 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1120 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1121 data
.autinc_from
= 1;
1122 data
.explicit_inc_from
= -1;
1124 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1126 data
.from_addr
= copy_addr_to_reg (from_addr
);
1127 data
.autinc_from
= 1;
1128 data
.explicit_inc_from
= 1;
1130 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1131 data
.from_addr
= copy_addr_to_reg (from_addr
);
1132 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1134 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1136 data
.explicit_inc_to
= -1;
1138 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1140 data
.to_addr
= copy_addr_to_reg (to_addr
);
1142 data
.explicit_inc_to
= 1;
1144 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1145 data
.to_addr
= copy_addr_to_reg (to_addr
);
1148 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1149 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1150 align
= MOVE_MAX
* BITS_PER_UNIT
;
1152 /* First move what we can in the largest integer mode, then go to
1153 successively smaller modes. */
1155 while (max_size
> 1)
1157 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1158 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1159 if (GET_MODE_SIZE (tmode
) < max_size
)
1162 if (mode
== VOIDmode
)
1165 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1166 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1167 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1169 max_size
= GET_MODE_SIZE (mode
);
1172 /* The code above should have handled everything. */
1186 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
1187 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
1189 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
1192 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1199 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;
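
  /* A worked example of the loop below: with MOVE_MAX == 4, sufficient
     alignment, and L == 10, the widest usable mode is SImode (2 insns,
     8 bytes), then HImode picks up the remaining 2 bytes (1 insn), for a
     total of 3 insns.  (This assumes the usual QI/HI/SI integer modes and
     that each mov optab is available.)  */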
1220 while (max_size
> 1)
1222 enum machine_mode mode
= VOIDmode
, tmode
;
1223 enum insn_code icode
;
1225 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1226 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1227 if (GET_MODE_SIZE (tmode
) < max_size
)
1230 if (mode
== VOIDmode
)
1233 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1234 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1235 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1237 max_size
= GET_MODE_SIZE (mode
);
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;
1256 while (data
->len
>= size
)
1259 data
->offset
-= size
;
1263 if (data
->autinc_to
)
1264 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1267 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1270 if (data
->autinc_from
)
1271 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1274 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1276 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1277 emit_insn (gen_add2_insn (data
->to_addr
,
1278 GEN_INT (-(HOST_WIDE_INT
)size
)));
1279 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1280 emit_insn (gen_add2_insn (data
->from_addr
,
1281 GEN_INT (-(HOST_WIDE_INT
)size
)));
1284 emit_insn ((*genfun
) (to1
, from1
));
1287 #ifdef PUSH_ROUNDING
1288 emit_single_push_insn (mode
, from1
, NULL
);
1294 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1295 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1296 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1297 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1299 if (! data
->reverse
)
1300 data
->offset
+= size
;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
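
/* A minimal usage sketch, assuming DST_MEM and SRC_MEM are BLKmode MEMs
   for a 32-byte structure assignment:

     retval = emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   With BLOCK_OP_NORMAL a memcpy libcall may be emitted; BLOCK_OP_NO_LIBCALL
   instead falls back to the by-pieces or loop strategies.  */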
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
1327 case BLOCK_OP_NORMAL
:
1328 may_use_call
= true;
1331 case BLOCK_OP_CALL_PARM
:
1332 may_use_call
= block_move_libcall_safe_for_call_parm ();
1334 /* Make inhibit_defer_pop nonzero around the library call
1335 to force it to pop the arguments right away. */
1339 case BLOCK_OP_NO_LIBCALL
:
1340 may_use_call
= false;
1347 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1349 if (GET_MODE (x
) != BLKmode
)
1351 if (GET_MODE (y
) != BLKmode
)
1354 x
= protect_from_queue (x
, 1);
1355 y
= protect_from_queue (y
, 0);
1356 size
= protect_from_queue (size
, 0);
1358 if (GET_CODE (x
) != MEM
)
1360 if (GET_CODE (y
) != MEM
)
1365 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1366 can be incorrect is coming from __builtin_memcpy. */
1367 if (GET_CODE (size
) == CONST_INT
)
1369 if (INTVAL (size
) == 0)
1372 x
= shallow_copy_rtx (x
);
1373 y
= shallow_copy_rtx (y
);
1374 set_mem_size (x
, size
);
1375 set_mem_size (y
, size
);
1378 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1379 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1380 else if (emit_block_move_via_movstr (x
, y
, size
, align
))
1382 else if (may_use_call
)
1383 retval
= emit_block_move_via_libcall (x
, y
, size
);
1385 emit_block_move_via_loop (x
, y
, size
, align
);
1387 if (method
== BLOCK_OP_CALL_PARM
)
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
1400 /* If arguments are pushed on the stack, then they're safe. */
1404 /* If registers go on the stack anyway, any argument is sure to clobber
1405 an outgoing argument. */
1406 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1408 tree fn
= emit_block_move_libcall_fn (false);
1410 if (REG_PARM_STACK_SPACE (fn
) != 0)
1415 /* If any argument goes in memory, then it might clobber an outgoing
1418 CUMULATIVE_ARGS args_so_far
;
1421 fn
= emit_block_move_libcall_fn (false);
1422 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0);
1424 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1425 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1427 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1428 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1429 if (!tmp
|| !REG_P (tmp
))
1431 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1432 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1436 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
1448 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1449 enum machine_mode mode
;
1451 /* Since this is a move insn, we don't care about volatility. */
1454 /* Try the most limited insn first, because there's no point
1455 including more than one in the machine description unless
1456 the more limited one has some advantage. */
1458 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1459 mode
= GET_MODE_WIDER_MODE (mode
))
1461 enum insn_code code
= movstr_optab
[(int) mode
];
1462 insn_operand_predicate_fn pred
;
1464 if (code
!= CODE_FOR_nothing
1465 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1466 here because if SIZE is less than the mode mask, as it is
1467 returned by the macro, it will definitely be less than the
1468 actual mode mask. */
1469 && ((GET_CODE (size
) == CONST_INT
1470 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1471 <= (GET_MODE_MASK (mode
) >> 1)))
1472 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1473 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1474 || (*pred
) (x
, BLKmode
))
1475 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1476 || (*pred
) (y
, BLKmode
))
1477 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1478 || (*pred
) (opalign
, VOIDmode
)))
1481 rtx last
= get_last_insn ();
1484 op2
= convert_to_mode (mode
, size
, 1);
1485 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1486 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1487 op2
= copy_to_mode_reg (mode
, op2
);
1489 /* ??? When called via emit_block_move_for_call, it'd be
1490 nice if there were some way to inform the backend, so
1491 that it doesn't fail the expansion because it thinks
1492 emitting the libcall would be more efficient. */
1494 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1502 delete_insns_since (last
);
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;
1521 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1523 It is unsafe to save the value generated by protect_from_queue and reuse
1524 it later. Consider what happens if emit_queue is called before the
1525 return value from protect_from_queue is used.
1527 Expansion of the CALL_EXPR below will call emit_queue before we are
1528 finished emitting RTL for argument setup. So if we are not careful we
1529 could get the wrong value for an argument.
1531 To avoid this problem we go ahead and emit code to copy the addresses of
1532 DST and SRC and SIZE into new pseudos. We can then place those new
1533 pseudos into an RTL_EXPR and use them later, even after a call to
1536 Note this is not strictly needed for library calls since they do not call
1537 emit_queue before loading their arguments. However, we may need to have
1538 library calls call emit_queue in the future since failing to do so could
1539 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1540 arguments in registers. */
1542 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1543 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1545 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1546 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1548 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1549 src_tree
= make_tree (ptr_type_node
, src_addr
);
1551 if (TARGET_MEM_FUNCTIONS
)
1552 size_mode
= TYPE_MODE (sizetype
);
1554 size_mode
= TYPE_MODE (unsigned_type_node
);
1556 size
= convert_to_mode (size_mode
, size
, 1);
1557 size
= copy_to_mode_reg (size_mode
, size
);
1559 /* It is incorrect to use the libcall calling conventions to call
1560 memcpy in this context. This could be a user call to memcpy and
1561 the user may wish to examine the return value from memcpy. For
1562 targets where libcalls and normal calls have different conventions
1563 for returning pointers, we could end up generating incorrect code.
1565 For convenience, we generate the call to bcopy this way as well. */
1567 if (TARGET_MEM_FUNCTIONS
)
1568 size_tree
= make_tree (sizetype
, size
);
1570 size_tree
= make_tree (unsigned_type_node
, size
);
1572 fn
= emit_block_move_libcall_fn (true);
1573 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1574 if (TARGET_MEM_FUNCTIONS
)
1576 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1577 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1581 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1582 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1585 /* Now we have to build up the CALL_EXPR itself. */
1586 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1587 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1588 call_expr
, arg_list
, NULL_TREE
);
1590 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1592 /* If we are initializing a readonly value, show the above call clobbered
1593 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1594 the delay slot scheduler might overlook conflicts and take nasty
1596 if (RTX_UNCHANGING_P (dst
))
1597 add_function_usage_to
1598 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode
,
1599 gen_rtx_CLOBBER (VOIDmode
, dst
),
1602 return TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
;
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;
1618 if (TARGET_MEM_FUNCTIONS
)
1620 fn
= get_identifier ("memcpy");
1621 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1622 const_ptr_type_node
, sizetype
,
1627 fn
= get_identifier ("bcopy");
1628 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
1629 ptr_type_node
, unsigned_type_node
,
1633 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
1634 DECL_EXTERNAL (fn
) = 1;
1635 TREE_PUBLIC (fn
) = 1;
1636 DECL_ARTIFICIAL (fn
) = 1;
1637 TREE_NOTHROW (fn
) = 1;
1644 SET_DECL_RTL (block_move_fn
, NULL_RTX
);
1645 SET_DECL_ASSEMBLER_NAME (block_move_fn
, get_identifier (asmspec
));
1650 emit_block_move_libcall_fn (int for_call
)
1652 static bool emitted_extern
;
1655 init_block_move_fn (NULL
);
1657 if (for_call
&& !emitted_extern
)
1659 emitted_extern
= true;
1660 make_decl_rtl (block_move_fn
, NULL
);
1661 assemble_external (block_move_fn
);
1664 return block_move_fn
;
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;
1678 iter_mode
= GET_MODE (size
);
1679 if (iter_mode
== VOIDmode
)
1680 iter_mode
= word_mode
;
1682 top_label
= gen_label_rtx ();
1683 cmp_label
= gen_label_rtx ();
1684 iter
= gen_reg_rtx (iter_mode
);
1686 emit_move_insn (iter
, const0_rtx
);
1688 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1689 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1690 do_pending_stack_adjust ();
1692 emit_note (NOTE_INSN_LOOP_BEG
);
1694 emit_jump (cmp_label
);
1695 emit_label (top_label
);
1697 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
1698 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
1699 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
1700 x
= change_address (x
, QImode
, x_addr
);
1701 y
= change_address (y
, QImode
, y_addr
);
1703 emit_move_insn (x
, y
);
1705 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1706 true, OPTAB_LIB_WIDEN
);
1708 emit_move_insn (iter
, tmp
);
1710 emit_note (NOTE_INSN_LOOP_CONT
);
1711 emit_label (cmp_label
);
1713 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1716 emit_note (NOTE_INSN_LOOP_END
);
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
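
/* A minimal usage sketch, assuming a 32-bit word target where a DImode
   value X is to live in the two consecutive hard registers starting at
   the (hypothetical) register number 3:

     move_block_to_reg (3, x, 2, DImode);  */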
void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
1726 #ifdef HAVE_load_multiple
1734 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1735 x
= validize_mem (force_const_mem (mode
, x
));
1737 /* See if the machine can do this with a load multiple insn. */
1738 #ifdef HAVE_load_multiple
1739 if (HAVE_load_multiple
)
1741 last
= get_last_insn ();
1742 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1750 delete_insns_since (last
);
1754 for (i
= 0; i
< nregs
; i
++)
1755 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1756 operand_subword_force (x
, i
, mode
));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;
1770 /* See if the machine can do this with a store multiple insn. */
1771 #ifdef HAVE_store_multiple
1772 if (HAVE_store_multiple
)
1774 rtx last
= get_last_insn ();
1775 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1783 delete_insns_since (last
);
1787 for (i
= 0; i
< nregs
; i
++)
1789 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1794 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;
1810 if (GET_CODE (orig
) != PARALLEL
)
1813 length
= XVECLEN (orig
, 0);
1814 tmps
= alloca (sizeof (rtx
) * length
);
1816 /* Skip a NULL entry in first slot. */
1817 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1822 for (; i
< length
; i
++)
1824 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1825 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1827 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1830 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;
1844 if (GET_CODE (dst
) != PARALLEL
)
1847 /* Check for a NULL entry, used to indicate that the parameter goes
1848 both on the stack and in registers. */
1849 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1854 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1856 /* Process the pieces. */
1857 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1859 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1860 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1861 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1864 /* Handle trailing fragments that run over the size of the struct. */
1865 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1867 /* Arrange to shift the fragment to where it belongs.
1868 extract_bit_field loads to the lsb of the reg. */
1870 #ifdef BLOCK_REG_PADDING
1871 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1872 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1877 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1878 bytelen
= ssize
- bytepos
;
1883 /* If we won't be loading directly from memory, protect the real source
1884 from strange tricks we might play; but make sure that the source can
1885 be loaded directly into the destination. */
1887 if (GET_CODE (orig_src
) != MEM
1888 && (!CONSTANT_P (orig_src
)
1889 || (GET_MODE (orig_src
) != mode
1890 && GET_MODE (orig_src
) != VOIDmode
)))
1892 if (GET_MODE (orig_src
) == VOIDmode
)
1893 src
= gen_reg_rtx (mode
);
1895 src
= gen_reg_rtx (GET_MODE (orig_src
));
1897 emit_move_insn (src
, orig_src
);
1900 /* Optimize the access just a bit. */
1901 if (GET_CODE (src
) == MEM
1902 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1903 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1904 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1905 && bytelen
== GET_MODE_SIZE (mode
))
1907 tmps
[i
] = gen_reg_rtx (mode
);
1908 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1910 else if (GET_CODE (src
) == CONCAT
)
1912 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1913 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1915 if ((bytepos
== 0 && bytelen
== slen0
)
1916 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1918 /* The following assumes that the concatenated objects all
1919 have the same size. In this case, a simple calculation
1920 can be used to determine the object and the bit field
1922 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1923 if (! CONSTANT_P (tmps
[i
])
1924 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
1925 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1926 (bytepos
% slen0
) * BITS_PER_UNIT
,
1927 1, NULL_RTX
, mode
, mode
, ssize
);
1929 else if (bytepos
== 0)
1931 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1932 emit_move_insn (mem
, src
);
1933 tmps
[i
] = adjust_address (mem
, mode
, 0);
1938 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1939 SIMD register, which is currently broken. While we get GCC
1940 to emit proper RTL for these cases, let's dump to memory. */
1941 else if (VECTOR_MODE_P (GET_MODE (dst
))
1942 && GET_CODE (src
) == REG
)
1944 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1947 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1948 emit_move_insn (mem
, src
);
1949 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1951 else if (CONSTANT_P (src
)
1952 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
1955 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1956 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1960 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
1961 tmps
[i
], 0, OPTAB_WIDEN
);
1966 /* Copy the extracted pieces into the proper (probable) hard regs. */
1967 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1968 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;
1979 if (GET_CODE (src
) != PARALLEL
1980 || GET_CODE (dst
) != PARALLEL
1981 || XVECLEN (src
, 0) != XVECLEN (dst
, 0))
1984 /* Skip first entry if NULL. */
1985 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1986 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1987 XEXP (XVECEXP (src
, 0, i
), 0));
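
/* The register groups handled above and below have the shape (a sketch of
   the assumed RTL, not output from any particular target):

     (parallel [(expr_list (reg:SI 100) (const_int 0))
                (expr_list (reg:SI 101) (const_int 4))])

   i.e. each element pairs a register with its byte offset within the
   block, which is what the XEXP/XVECEXP accesses in these routines read.  */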
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
2001 if (GET_CODE (src
) != PARALLEL
)
2004 /* Check for a NULL entry, used to indicate that the parameter goes
2005 both on the stack and in registers. */
2006 if (XEXP (XVECEXP (src
, 0, 0), 0))
2011 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2013 /* Copy the (probable) hard regs into pseudos. */
2014 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2016 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2017 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2018 emit_move_insn (tmps
[i
], reg
);
2022 /* If we won't be storing directly into memory, protect the real destination
2023 from strange tricks we might play. */
2025 if (GET_CODE (dst
) == PARALLEL
)
2029 /* We can get a PARALLEL dst if there is a conditional expression in
2030 a return statement. In that case, the dst and src are the same,
2031 so no action is necessary. */
2032 if (rtx_equal_p (dst
, src
))
2035 /* It is unclear if we can ever reach here, but we may as well handle
2036 it. Allocate a temporary, and split this into a store/load to/from
2039 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2040 emit_group_store (temp
, src
, type
, ssize
);
2041 emit_group_load (dst
, temp
, type
, ssize
);
2044 else if (GET_CODE (dst
) != MEM
&& GET_CODE (dst
) != CONCAT
)
2046 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2047 /* Make life a bit easier for combine. */
2048 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
2051 /* Process the pieces. */
2052 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2054 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2055 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2056 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2059 /* Handle trailing fragments that run over the size of the struct. */
2060 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2062 /* store_bit_field always takes its value from the lsb.
2063 Move the fragment to the lsb if it's not already there. */
2065 #ifdef BLOCK_REG_PADDING
2066 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
2067 == (BYTES_BIG_ENDIAN
? upward
: downward
)
2073 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2074 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2075 tmps
[i
], 0, OPTAB_WIDEN
);
2077 bytelen
= ssize
- bytepos
;
2080 if (GET_CODE (dst
) == CONCAT
)
2082 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2083 dest
= XEXP (dst
, 0);
2084 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2086 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2087 dest
= XEXP (dst
, 1);
2089 else if (bytepos
== 0 && XVECLEN (src
, 0))
2091 dest
= assign_stack_temp (GET_MODE (dest
),
2092 GET_MODE_SIZE (GET_MODE (dest
)), 0);
2093 emit_move_insn (adjust_address (dest
, GET_MODE (tmps
[i
]), bytepos
),
2102 /* Optimize the access just a bit. */
2103 if (GET_CODE (dest
) == MEM
2104 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
2105 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
2106 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2107 && bytelen
== GET_MODE_SIZE (mode
))
2108 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2110 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2111 mode
, tmps
[i
], ssize
);
2116 /* Copy from the pseudo into the (probable) hard reg. */
2117 if (orig_dst
!= dst
)
2118 emit_move_insn (orig_dst
, dst
);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}

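/* Worked example of the padding correction above (illustrative, not part of
   the original sources): with 32-bit words, a 6-byte structure returned in
   registers on a big-endian target gives bytes % UNITS_PER_WORD == 2, so
   padding_correction = 32 - 2 * 8 = 16.  The copy loop then starts
   extracting at bit 16 of SRCREG (xbitpos = 16) while storing at bit 0 of
   TGTBLK (bitpos = 0), skipping the left padding in the register.  */
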
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}

/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      if (endp == 2)
        abort ();
      return to;
    }

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}

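/* Illustrative usage sketch (not part of the original file; the helper name
   is hypothetical).  A caller supplies a CONSTFUN that produces the constant
   for each piece; reusing clear_by_pieces_1 below, which ignores its
   arguments and returns const0_rtx, every piece stored is zero.  The
   check-then-store pattern mirrors how the can_store_by_pieces /
   store_by_pieces pair is intended to be used.  */
#if 0
static void
example_zero_fill (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, clear_by_pieces_1, NULL, align))
    store_by_pieces (to, len, clear_by_pieces_1, NULL, align, 0);
}
#endif
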
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}

/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}

/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size)
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (size == const0_rtx)
        ;
      else if (GET_CODE (size) == CONST_INT
               && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}

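/* Illustrative usage sketch (not part of the original file; the helper name
   is hypothetical): zeroing a 32-byte BLKmode stack temporary goes through
   the cascade above -- clear_by_pieces if CLEAR_BY_PIECES_P allows it, else
   a clrstr pattern, else the memset/bzero libcall.  */
#if 0
static void
example_zero_stack_temp (void)
{
  rtx obj = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (obj, GEN_INT (32));
}
#endif
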
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}

/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}

/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memset");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           integer_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bzero");
          args = build_function_type_list (void_type_node, ptr_type_node,
                                           unsigned_type_node, NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_clear_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
    }
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn, NULL);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}

2855 /* Low level part of emit_move_insn.
2856 Called just like emit_move_insn, but assumes X and Y
2857 are basically valid. */
2860 emit_move_insn_1 (rtx x
, rtx y
)
2862 enum machine_mode mode
= GET_MODE (x
);
2863 enum machine_mode submode
;
2864 enum mode_class
class = GET_MODE_CLASS (mode
);
2866 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2869 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2871 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2873 /* Expand complex moves by moving real part and imag part, if possible. */
2874 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2875 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
2876 && (mov_optab
->handlers
[(int) submode
].insn_code
2877 != CODE_FOR_nothing
))
2879 /* Don't split destination if it is a stack push. */
2880 int stack
= push_operand (x
, GET_MODE (x
));
2882 #ifdef PUSH_ROUNDING
2883 /* In case we output to the stack, but the size is smaller than the
2884 machine can push exactly, we need to use move instructions. */
2886 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
2887 != GET_MODE_SIZE (submode
)))
2890 HOST_WIDE_INT offset1
, offset2
;
2892 /* Do not use anti_adjust_stack, since we don't want to update
2893 stack_pointer_delta. */
2894 temp
= expand_binop (Pmode
,
2895 #ifdef STACK_GROWS_DOWNWARD
2903 (GET_MODE_SIZE (GET_MODE (x
)))),
2904 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2906 if (temp
!= stack_pointer_rtx
)
2907 emit_move_insn (stack_pointer_rtx
, temp
);
2909 #ifdef STACK_GROWS_DOWNWARD
2911 offset2
= GET_MODE_SIZE (submode
);
2913 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2914 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2915 + GET_MODE_SIZE (submode
));
2918 emit_move_insn (change_address (x
, submode
,
2919 gen_rtx_PLUS (Pmode
,
2921 GEN_INT (offset1
))),
2922 gen_realpart (submode
, y
));
2923 emit_move_insn (change_address (x
, submode
,
2924 gen_rtx_PLUS (Pmode
,
2926 GEN_INT (offset2
))),
2927 gen_imagpart (submode
, y
));
2931 /* If this is a stack, push the highpart first, so it
2932 will be in the argument order.
2934 In that case, change_address is used only to convert
2935 the mode, not to change the address. */
2938 /* Note that the real part always precedes the imag part in memory
2939 regardless of machine's endianness. */
2940 #ifdef STACK_GROWS_DOWNWARD
2941 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2942 gen_imagpart (submode
, y
));
2943 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2944 gen_realpart (submode
, y
));
2946 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2947 gen_realpart (submode
, y
));
2948 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2949 gen_imagpart (submode
, y
));
2954 rtx realpart_x
, realpart_y
;
2955 rtx imagpart_x
, imagpart_y
;
2957 /* If this is a complex value with each part being smaller than a
2958 word, the usual calling sequence will likely pack the pieces into
2959 a single register. Unfortunately, SUBREG of hard registers only
2960 deals in terms of words, so we have a problem converting input
2961 arguments to the CONCAT of two registers that is used elsewhere
2962 for complex values. If this is before reload, we can copy it into
2963 memory and reload. FIXME, we should see about using extract and
2964 insert on integer registers, but complex short and complex char
2965 variables should be rarely used. */
2966 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2967 && (reload_in_progress
| reload_completed
) == 0)
2970 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2972 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2974 if (packed_dest_p
|| packed_src_p
)
2976 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2977 ? MODE_FLOAT
: MODE_INT
);
2979 enum machine_mode reg_mode
2980 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2982 if (reg_mode
!= BLKmode
)
2984 rtx mem
= assign_stack_temp (reg_mode
,
2985 GET_MODE_SIZE (mode
), 0);
2986 rtx cmem
= adjust_address (mem
, mode
, 0);
2989 = N_("function using short complex types cannot be inline");
2993 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2995 emit_move_insn_1 (cmem
, y
);
2996 return emit_move_insn_1 (sreg
, mem
);
3000 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
3002 emit_move_insn_1 (mem
, sreg
);
3003 return emit_move_insn_1 (x
, cmem
);
3009 realpart_x
= gen_realpart (submode
, x
);
3010 realpart_y
= gen_realpart (submode
, y
);
3011 imagpart_x
= gen_imagpart (submode
, x
);
3012 imagpart_y
= gen_imagpart (submode
, y
);
3014 /* Show the output dies here. This is necessary for SUBREGs
3015 of pseudos since we cannot track their lifetimes correctly;
3016 hard regs shouldn't appear here except as return values.
3017 We never want to emit such a clobber after reload. */
3019 && ! (reload_in_progress
|| reload_completed
)
3020 && (GET_CODE (realpart_x
) == SUBREG
3021 || GET_CODE (imagpart_x
) == SUBREG
))
3022 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3024 emit_move_insn (realpart_x
, realpart_y
);
3025 emit_move_insn (imagpart_x
, imagpart_y
);
3028 return get_last_insn ();
3031 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3032 find a mode to do it in. If we have a movcc, use it. Otherwise,
3033 find the MODE_INT mode of the same width. */
3034 else if (GET_MODE_CLASS (mode
) == MODE_CC
3035 && mov_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
3037 enum insn_code insn_code
;
3038 enum machine_mode tmode
= VOIDmode
;
3042 && mov_optab
->handlers
[(int) CCmode
].insn_code
!= CODE_FOR_nothing
)
3045 for (tmode
= QImode
; tmode
!= VOIDmode
;
3046 tmode
= GET_MODE_WIDER_MODE (tmode
))
3047 if (GET_MODE_SIZE (tmode
) == GET_MODE_SIZE (mode
))
3050 if (tmode
== VOIDmode
)
3053 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3054 may call change_address which is not appropriate if we were
3055 called when a reload was in progress. We don't have to worry
3056 about changing the address since the size in bytes is supposed to
3057 be the same. Copy the MEM to change the mode and move any
3058 substitutions from the old MEM to the new one. */
3060 if (reload_in_progress
)
3062 x
= gen_lowpart_common (tmode
, x1
);
3063 if (x
== 0 && GET_CODE (x1
) == MEM
)
3065 x
= adjust_address_nv (x1
, tmode
, 0);
3066 copy_replacements (x1
, x
);
3069 y
= gen_lowpart_common (tmode
, y1
);
3070 if (y
== 0 && GET_CODE (y1
) == MEM
)
3072 y
= adjust_address_nv (y1
, tmode
, 0);
3073 copy_replacements (y1
, y
);
3078 x
= gen_lowpart (tmode
, x
);
3079 y
= gen_lowpart (tmode
, y
);
3082 insn_code
= mov_optab
->handlers
[(int) tmode
].insn_code
;
3083 return emit_insn (GEN_FCN (insn_code
) (x
, y
));
3086 /* Try using a move pattern for the corresponding integer mode. This is
3087 only safe when simplify_subreg can convert MODE constants into integer
3088 constants. At present, it can only do this reliably if the value
3089 fits within a HOST_WIDE_INT. */
3090 else if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3091 && (submode
= int_mode_for_mode (mode
)) != BLKmode
3092 && mov_optab
->handlers
[submode
].insn_code
!= CODE_FOR_nothing
)
3093 return emit_insn (GEN_FCN (mov_optab
->handlers
[submode
].insn_code
)
3094 (simplify_gen_subreg (submode
, x
, mode
, 0),
3095 simplify_gen_subreg (submode
, y
, mode
, 0)));
3097 /* This will handle any multi-word or full-word mode that lacks a move_insn
3098 pattern. However, you will get better code if you define such patterns,
3099 even if they must turn into multiple assembler instructions. */
3100 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3107 #ifdef PUSH_ROUNDING
3109 /* If X is a push on the stack, do the push now and replace
3110 X with a reference to the stack pointer. */
3111 if (push_operand (x
, GET_MODE (x
)))
3116 /* Do not use anti_adjust_stack, since we don't want to update
3117 stack_pointer_delta. */
3118 temp
= expand_binop (Pmode
,
3119 #ifdef STACK_GROWS_DOWNWARD
3127 (GET_MODE_SIZE (GET_MODE (x
)))),
3128 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3130 if (temp
!= stack_pointer_rtx
)
3131 emit_move_insn (stack_pointer_rtx
, temp
);
3133 code
= GET_CODE (XEXP (x
, 0));
3135 /* Just hope that small offsets off SP are OK. */
3136 if (code
== POST_INC
)
3137 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3138 GEN_INT (-((HOST_WIDE_INT
)
3139 GET_MODE_SIZE (GET_MODE (x
)))));
3140 else if (code
== POST_DEC
)
3141 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3142 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3144 temp
= stack_pointer_rtx
;
3146 x
= change_address (x
, VOIDmode
, temp
);
3150 /* If we are in reload, see if either operand is a MEM whose address
3151 is scheduled for replacement. */
3152 if (reload_in_progress
&& GET_CODE (x
) == MEM
3153 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3154 x
= replace_equiv_address_nv (x
, inner
);
3155 if (reload_in_progress
&& GET_CODE (y
) == MEM
3156 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3157 y
= replace_equiv_address_nv (y
, inner
);
3163 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3166 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3167 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3169 /* If we can't get a part of Y, put Y into memory if it is a
3170 constant. Otherwise, force it into a register. If we still
3171 can't get a part of Y, abort. */
3172 if (ypart
== 0 && CONSTANT_P (y
))
3174 y
= force_const_mem (mode
, y
);
3175 ypart
= operand_subword (y
, i
, 1, mode
);
3177 else if (ypart
== 0)
3178 ypart
= operand_subword_force (y
, i
, mode
);
3180 if (xpart
== 0 || ypart
== 0)
3183 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3185 last_insn
= emit_move_insn (xpart
, ypart
);
3191 /* Show the output dies here. This is necessary for SUBREGs
3192 of pseudos since we cannot track their lifetimes correctly;
3193 hard regs shouldn't appear here except as return values.
3194 We never want to emit such a clobber after reload. */
3196 && ! (reload_in_progress
|| reload_completed
)
3197 && need_clobber
!= 0)
3198 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}

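/* Example of the optimization above (illustrative, not from the original
   sources): moving the DFmode constant 1.0 into a register.  Since 1.0 is
   exactly representable in SFmode, a target that can extend directly from an
   SFmode constant or constant-pool entry gets an SF->DF extension instead of
   an 8-byte constant-pool load, roughly halving the pool entry for values
   that round-trip exactly through the narrower mode.  */
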
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

3320 #ifdef PUSH_ROUNDING
3322 /* Emit single push insn. */
3325 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
3328 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3330 enum insn_code icode
;
3331 insn_operand_predicate_fn pred
;
3333 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3334 /* If there is push pattern, use it. Otherwise try old way of throwing
3335 MEM representing push operation to move expander. */
3336 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3337 if (icode
!= CODE_FOR_nothing
)
3339 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3340 && !((*pred
) (x
, mode
))))
3341 x
= force_reg (mode
, x
);
3342 emit_insn (GEN_FCN (icode
) (x
));
3345 if (GET_MODE_SIZE (mode
) == rounded_size
)
3346 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3347 /* If we are to pad downward, adjust the stack pointer first and
3348 then store X into the stack location using an offset. This is
3349 because emit_move_insn does not know how to pad; it does not have
3351 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
3353 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
3354 HOST_WIDE_INT offset
;
3356 emit_move_insn (stack_pointer_rtx
,
3357 expand_binop (Pmode
,
3358 #ifdef STACK_GROWS_DOWNWARD
3364 GEN_INT (rounded_size
),
3365 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
3367 offset
= (HOST_WIDE_INT
) padding_size
;
3368 #ifdef STACK_GROWS_DOWNWARD
3369 if (STACK_PUSH_CODE
== POST_DEC
)
3370 /* We have already decremented the stack pointer, so get the
3372 offset
+= (HOST_WIDE_INT
) rounded_size
;
3374 if (STACK_PUSH_CODE
== POST_INC
)
3375 /* We have already incremented the stack pointer, so get the
3377 offset
-= (HOST_WIDE_INT
) rounded_size
;
3379 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (offset
));
3383 #ifdef STACK_GROWS_DOWNWARD
3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3385 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3386 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3389 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3390 GEN_INT (rounded_size
));
3392 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3395 dest
= gen_rtx_MEM (mode
, dest_addr
);
3399 set_mem_attributes (dest
, type
, 1);
3401 if (flag_optimize_sibling_calls
)
3402 /* Function incoming arguments may overlap with sibling call
3403 outgoing arguments and we cannot allow reordering of reads
3404 from function arguments with stores to outgoing arguments
3405 of sibling calls. */
3406 set_mem_alias_set (dest
, 0);
3408 emit_move_insn (dest
, x
);
3412 /* Generate code to push X onto the stack, assuming it has mode MODE and
3414 MODE is redundant except when X is a CONST_INT (since they don't
3416 SIZE is an rtx for the size of data to be copied (in bytes),
3417 needed only if X is BLKmode.
3419 ALIGN (in bits) is maximum alignment we can assume.
3421 If PARTIAL and REG are both nonzero, then copy that many of the first
3422 words of X into registers starting with REG, and push the rest of X.
3423 The amount of space pushed is decreased by PARTIAL words,
3424 rounded *down* to a multiple of PARM_BOUNDARY.
3425 REG must be a hard register in this case.
3426 If REG is zero but PARTIAL is not, take any all others actions for an
3427 argument partially in registers, but do not actually load any
3430 EXTRA is the amount in bytes of extra space to leave next to this arg.
3431 This is ignored if an argument block has already been allocated.
3433 On a machine that lacks real push insns, ARGS_ADDR is the address of
3434 the bottom of the argument block for this call. We use indexing off there
3435 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3436 argument block has not been preallocated.
3438 ARGS_SO_FAR is the size of args previously pushed for this call.
3440 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3441 for arguments passed in registers. If nonzero, it will be the number
3442 of bytes required. */
3445 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3446 unsigned int align
, int partial
, rtx reg
, int extra
,
3447 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3451 enum direction stack_direction
3452 #ifdef STACK_GROWS_DOWNWARD
3458 /* Decide where to pad the argument: `downward' for below,
3459 `upward' for above, or `none' for don't pad it.
3460 Default is below for small data on big-endian machines; else above. */
3461 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3463 /* Invert direction if stack is post-decrement.
3465 if (STACK_PUSH_CODE
== POST_DEC
)
3466 if (where_pad
!= none
)
3467 where_pad
= (where_pad
== downward
? upward
: downward
);
3469 xinner
= x
= protect_from_queue (x
, 0);
3471 if (mode
== BLKmode
)
3473 /* Copy a block into the stack, entirely or partially. */
3476 int used
= partial
* UNITS_PER_WORD
;
3480 if (reg
&& GET_CODE (reg
) == PARALLEL
)
3482 /* Use the size of the elt to compute offset. */
3483 rtx elt
= XEXP (XVECEXP (reg
, 0, 0), 0);
3484 used
= partial
* GET_MODE_SIZE (GET_MODE (elt
));
3485 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3488 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3495 /* USED is now the # of bytes we need not copy to the stack
3496 because registers will take care of them. */
3499 xinner
= adjust_address (xinner
, BLKmode
, used
);
3501 /* If the partial register-part of the arg counts in its stack size,
3502 skip the part of stack space corresponding to the registers.
3503 Otherwise, start copying to the beginning of the stack space,
3504 by setting SKIP to 0. */
3505 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3507 #ifdef PUSH_ROUNDING
3508 /* Do it with several push insns if that doesn't take lots of insns
3509 and if there is no difficulty with push insns that skip bytes
3510 on the stack for alignment purposes. */
3513 && GET_CODE (size
) == CONST_INT
3515 && MEM_ALIGN (xinner
) >= align
3516 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3517 /* Here we avoid the case of a structure whose weak alignment
3518 forces many pushes of a small amount of data,
3519 and such small pushes do rounding that causes trouble. */
3520 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3521 || align
>= BIGGEST_ALIGNMENT
3522 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3523 == (align
/ BITS_PER_UNIT
)))
3524 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3526 /* Push padding now if padding above and stack grows down,
3527 or if padding below and stack grows up.
3528 But if space already allocated, this has already been done. */
3529 if (extra
&& args_addr
== 0
3530 && where_pad
!= none
&& where_pad
!= stack_direction
)
3531 anti_adjust_stack (GEN_INT (extra
));
3533 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3536 #endif /* PUSH_ROUNDING */
3540 /* Otherwise make space on the stack and copy the data
3541 to the address of that space. */
3543 /* Deduct words put into registers from the size we must copy. */
3546 if (GET_CODE (size
) == CONST_INT
)
3547 size
= GEN_INT (INTVAL (size
) - used
);
3549 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3550 GEN_INT (used
), NULL_RTX
, 0,
3554 /* Get the address of the stack space.
3555 In this case, we do not deal with EXTRA separately.
3556 A single stack adjust will do. */
3559 temp
= push_block (size
, extra
, where_pad
== downward
);
3562 else if (GET_CODE (args_so_far
) == CONST_INT
)
3563 temp
= memory_address (BLKmode
,
3564 plus_constant (args_addr
,
3565 skip
+ INTVAL (args_so_far
)));
3567 temp
= memory_address (BLKmode
,
3568 plus_constant (gen_rtx_PLUS (Pmode
,
3573 if (!ACCUMULATE_OUTGOING_ARGS
)
3575 /* If the source is referenced relative to the stack pointer,
3576 copy it to another register to stabilize it. We do not need
3577 to do this if we know that we won't be changing sp. */
3579 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3580 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3581 temp
= copy_to_reg (temp
);
3584 target
= gen_rtx_MEM (BLKmode
, temp
);
3588 set_mem_attributes (target
, type
, 1);
3589 /* Function incoming arguments may overlap with sibling call
3590 outgoing arguments and we cannot allow reordering of reads
3591 from function arguments with stores to outgoing arguments
3592 of sibling calls. */
3593 set_mem_alias_set (target
, 0);
3596 /* ALIGN may well be better aligned than TYPE, e.g. due to
3597 PARM_BOUNDARY. Assume the caller isn't lying. */
3598 set_mem_align (target
, align
);
3600 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3603 else if (partial
> 0)
3605 /* Scalar partly in registers. */
3607 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3610 /* # words of start of argument
3611 that we must make space for but need not store. */
3612 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3613 int args_offset
= INTVAL (args_so_far
);
3616 /* Push padding now if padding above and stack grows down,
3617 or if padding below and stack grows up.
3618 But if space already allocated, this has already been done. */
3619 if (extra
&& args_addr
== 0
3620 && where_pad
!= none
&& where_pad
!= stack_direction
)
3621 anti_adjust_stack (GEN_INT (extra
));
3623 /* If we make space by pushing it, we might as well push
3624 the real data. Otherwise, we can leave OFFSET nonzero
3625 and leave the space uninitialized. */
3629 /* Now NOT_STACK gets the number of words that we don't need to
3630 allocate on the stack. */
3631 not_stack
= partial
- offset
;
3633 /* If the partial register-part of the arg counts in its stack size,
3634 skip the part of stack space corresponding to the registers.
3635 Otherwise, start copying to the beginning of the stack space,
3636 by setting SKIP to 0. */
3637 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3639 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3640 x
= validize_mem (force_const_mem (mode
, x
));
3642 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3643 SUBREGs of such registers are not allowed. */
3644 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3645 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3646 x
= copy_to_reg (x
);
3648 /* Loop over all the words allocated on the stack for this arg. */
3649 /* We can do it by words, because any scalar bigger than a word
3650 has a size a multiple of a word. */
3651 #ifndef PUSH_ARGS_REVERSED
3652 for (i
= not_stack
; i
< size
; i
++)
3654 for (i
= size
- 1; i
>= not_stack
; i
--)
3656 if (i
>= not_stack
+ offset
)
3657 emit_push_insn (operand_subword_force (x
, i
, mode
),
3658 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3660 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3662 reg_parm_stack_space
, alignment_pad
);
3669 /* Push padding now if padding above and stack grows down,
3670 or if padding below and stack grows up.
3671 But if space already allocated, this has already been done. */
3672 if (extra
&& args_addr
== 0
3673 && where_pad
!= none
&& where_pad
!= stack_direction
)
3674 anti_adjust_stack (GEN_INT (extra
));
3676 #ifdef PUSH_ROUNDING
3677 if (args_addr
== 0 && PUSH_ARGS
)
3678 emit_single_push_insn (mode
, x
, type
);
3682 if (GET_CODE (args_so_far
) == CONST_INT
)
3684 = memory_address (mode
,
3685 plus_constant (args_addr
,
3686 INTVAL (args_so_far
)));
3688 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3690 dest
= gen_rtx_MEM (mode
, addr
);
3693 set_mem_attributes (dest
, type
, 1);
3694 /* Function incoming arguments may overlap with sibling call
3695 outgoing arguments and we cannot allow reordering of reads
3696 from function arguments with stores to outgoing arguments
3697 of sibling calls. */
3698 set_mem_alias_set (dest
, 0);
3701 emit_move_insn (dest
, x
);
3705 /* If part should go in registers, copy that part
3706 into the appropriate registers. Do this now, at the end,
3707 since mem-to-mem copies above may do function calls. */
3708 if (partial
> 0 && reg
!= 0)
3710 /* Handle calls that pass values in multiple non-contiguous locations.
3711 The Irix 6 ABI has examples of this. */
3712 if (GET_CODE (reg
) == PARALLEL
)
3713 emit_group_load (reg
, x
, type
, -1);
3715 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3718 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3719 anti_adjust_stack (GEN_INT (extra
));
3721 if (alignment_pad
&& args_addr
== 0)
3722 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}

3744 /* Expand an assignment that stores the value of FROM into TO.
3745 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3746 (This may contain a QUEUED rtx;
3747 if the value is constant, this rtx is a constant.)
3748 Otherwise, the returned value is NULL_RTX. */
3751 expand_assignment (tree to
, tree from
, int want_value
)
3756 /* Don't crash if the lhs of the assignment was erroneous. */
3758 if (TREE_CODE (to
) == ERROR_MARK
)
3760 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3761 return want_value
? result
: NULL_RTX
;
3764 /* Assignment of a structure component needs special treatment
3765 if the structure component's rtx is not simply a MEM.
3766 Assignment of an array element at a constant index, and assignment of
3767 an array element in an unaligned packed structure field, has the same
3770 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3771 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
3772 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
3774 enum machine_mode mode1
;
3775 HOST_WIDE_INT bitsize
, bitpos
;
3783 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3784 &unsignedp
, &volatilep
);
3786 /* If we are going to use store_bit_field and extract_bit_field,
3787 make sure to_rtx will be safe for multiple use. */
3789 if (mode1
== VOIDmode
&& want_value
)
3790 tem
= stabilize_reference (tem
);
3792 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3796 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3798 if (GET_CODE (to_rtx
) != MEM
)
3801 #ifdef POINTERS_EXTEND_UNSIGNED
3802 if (GET_MODE (offset_rtx
) != Pmode
)
3803 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
3805 if (GET_MODE (offset_rtx
) != ptr_mode
)
3806 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3809 /* A constant address in TO_RTX can have VOIDmode, we must not try
3810 to call force_reg for that case. Avoid that case. */
3811 if (GET_CODE (to_rtx
) == MEM
3812 && GET_MODE (to_rtx
) == BLKmode
3813 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3815 && (bitpos
% bitsize
) == 0
3816 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3817 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3819 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3823 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3824 highest_pow2_factor_for_type (TREE_TYPE (to
),
3828 if (GET_CODE (to_rtx
) == MEM
)
3830 /* If the field is at offset zero, we could have been given the
3831 DECL_RTX of the parent struct. Don't munge it. */
3832 to_rtx
= shallow_copy_rtx (to_rtx
);
3834 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
3837 /* Deal with volatile and readonly fields. The former is only done
3838 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3839 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
3841 if (to_rtx
== orig_to_rtx
)
3842 to_rtx
= copy_rtx (to_rtx
);
3843 MEM_VOLATILE_P (to_rtx
) = 1;
3846 if (TREE_CODE (to
) == COMPONENT_REF
3847 && TREE_READONLY (TREE_OPERAND (to
, 1))
3848 /* We can't assert that a MEM won't be set more than once
3849 if the component is not addressable because another
3850 non-addressable component may be referenced by the same MEM. */
3851 && ! (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
)))
3853 if (to_rtx
== orig_to_rtx
)
3854 to_rtx
= copy_rtx (to_rtx
);
3855 RTX_UNCHANGING_P (to_rtx
) = 1;
3858 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
3860 if (to_rtx
== orig_to_rtx
)
3861 to_rtx
= copy_rtx (to_rtx
);
3862 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3865 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3867 /* Spurious cast for HPUX compiler. */
3868 ? ((enum machine_mode
)
3869 TYPE_MODE (TREE_TYPE (to
)))
3871 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
3873 preserve_temp_slots (result
);
3877 /* If the value is meaningful, convert RESULT to the proper mode.
3878 Otherwise, return nothing. */
3879 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3880 TYPE_MODE (TREE_TYPE (from
)),
3882 TREE_UNSIGNED (TREE_TYPE (to
)))
3886 /* If the rhs is a function call and its value is not an aggregate,
3887 call the function before we start to compute the lhs.
3888 This is needed for correct code for cases such as
3889 val = setjmp (buf) on machines where reference to val
3890 requires loading up part of an address in a separate insn.
3892 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3893 since it might be a promoted variable where the zero- or sign- extension
3894 needs to be done. Handling this in the normal way is safe because no
3895 computation is done before the call. */
3896 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
3897 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3898 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3899 && GET_CODE (DECL_RTL (to
)) == REG
))
3904 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3906 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3908 /* Handle calls that return values in multiple non-contiguous locations.
3909 The Irix 6 ABI has examples of this. */
3910 if (GET_CODE (to_rtx
) == PARALLEL
)
3911 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
3912 int_size_in_bytes (TREE_TYPE (from
)));
3913 else if (GET_MODE (to_rtx
) == BLKmode
)
3914 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
3917 if (POINTER_TYPE_P (TREE_TYPE (to
)))
3918 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3919 emit_move_insn (to_rtx
, value
);
3921 preserve_temp_slots (to_rtx
);
3924 return want_value
? to_rtx
: NULL_RTX
;
3927 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3928 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3931 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3933 /* Don't move directly into a return register. */
3934 if (TREE_CODE (to
) == RESULT_DECL
3935 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3940 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3942 if (GET_CODE (to_rtx
) == PARALLEL
)
3943 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
3944 int_size_in_bytes (TREE_TYPE (from
)));
3946 emit_move_insn (to_rtx
, temp
);
3948 preserve_temp_slots (to_rtx
);
3951 return want_value
? to_rtx
: NULL_RTX
;
3954 /* In case we are returning the contents of an object which overlaps
3955 the place the value is being stored, use a safe function when copying
3956 a value through a pointer into a structure value return block. */
3957 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3958 && current_function_returns_struct
3959 && !current_function_returns_pcc_struct
)
3964 size
= expr_size (from
);
3965 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3967 if (TARGET_MEM_FUNCTIONS
)
3968 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3969 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3970 XEXP (from_rtx
, 0), Pmode
,
3971 convert_to_mode (TYPE_MODE (sizetype
),
3972 size
, TREE_UNSIGNED (sizetype
)),
3973 TYPE_MODE (sizetype
));
3975 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3976 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3977 XEXP (to_rtx
, 0), Pmode
,
3978 convert_to_mode (TYPE_MODE (integer_type_node
),
3980 TREE_UNSIGNED (integer_type_node
)),
3981 TYPE_MODE (integer_type_node
));
3983 preserve_temp_slots (to_rtx
);
3986 return want_value
? to_rtx
: NULL_RTX
;
3989 /* Compute FROM and store the value in the rtx we got. */
3992 result
= store_expr (from
, to_rtx
, want_value
);
3993 preserve_temp_slots (result
);
3996 return want_value
? result
: NULL_RTX
;
3999 /* Generate code for computing expression EXP,
4000 and storing the value into TARGET.
4001 TARGET may contain a QUEUED rtx.
4003 If WANT_VALUE & 1 is nonzero, return a copy of the value
4004 not in TARGET, so that we can be sure to use the proper
4005 value in a containing expression even if TARGET has something
4006 else stored in it. If possible, we copy the value through a pseudo
4007 and return that pseudo. Or, if the value is constant, we try to
4008 return the constant. In some cases, we return a pseudo
4009 copied *from* TARGET.
4011 If the mode is BLKmode then we may return TARGET itself.
4012 It turns out that in BLKmode it doesn't cause a problem.
4013 because C has no operators that could combine two different
4014 assignments into the same BLKmode object with different values
4015 with no sequence point. Will other languages need this to
4018 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4019 to catch quickly any cases where the caller uses the value
4020 and fails to set WANT_VALUE.
4022 If WANT_VALUE & 2 is set, this is a store into a call param on the
4023 stack, and block moves may need to be treated specially. */
4026 store_expr (tree exp
, rtx target
, int want_value
)
4029 int dont_return_target
= 0;
4030 int dont_store_target
= 0;
4032 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4034 /* C++ can generate ?: expressions with a throw expression in one
4035 branch and an rvalue in the other. Here, we resolve attempts to
4036 store the throw expression's nonexistent result. */
4039 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
4042 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4044 /* Perform first part of compound expression, then assign from second
4046 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4047 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4049 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4051 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */
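      /* Roughly, the code below emits
	   if (!cond) goto lab1;  <store arm 1 into TARGET>;  goto lab2;
	   lab1:  <store arm 2 into TARGET>;
	   lab2:
	 so both arms write the same, already-safe TARGET and no extra
	 temporary is created.  */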
4058 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4061 target
= protect_from_queue (target
, 1);
4063 do_pending_stack_adjust ();
4065 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4066 start_cleanup_deferral ();
4067 store_expr (TREE_OPERAND (exp
, 1), target
, want_value
& 2);
4068 end_cleanup_deferral ();
4070 emit_jump_insn (gen_jump (lab2
));
4073 start_cleanup_deferral ();
4074 store_expr (TREE_OPERAND (exp
, 2), target
, want_value
& 2);
4075 end_cleanup_deferral ();
4080 return want_value
& 1 ? target
: NULL_RTX
;
4082 else if (queued_subexp_p (target
))
4083 /* If target contains a postincrement, let's not risk
4084 using it as the place to generate the rhs. */
4086 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4088 /* Expand EXP into a new pseudo. */
4089 temp
= gen_reg_rtx (GET_MODE (target
));
4090 temp
= expand_expr (exp
, temp
, GET_MODE (target
),
4092 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4095 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
),
4097 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4099 /* If target is volatile, ANSI requires accessing the value
4100 *from* the target, if it is accessed. So make that happen.
4101 In no case return the target itself. */
4102 if (! MEM_VOLATILE_P (target
) && (want_value
& 1) != 0)
4103 dont_return_target
= 1;
4105 else if ((want_value
& 1) != 0
4106 && GET_CODE (target
) == MEM
4107 && ! MEM_VOLATILE_P (target
)
4108 && GET_MODE (target
) != BLKmode
)
4109 /* If target is in memory and caller wants value in a register instead,
4110 arrange that. Pass TARGET as target for expand_expr so that,
4111 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4112 We know expand_expr will not use the target in that case.
4113 Don't do this if TARGET is volatile because we are supposed
4114 to write it and then read it. */
4116 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4117 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4118 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4120 /* If TEMP is already in the desired TARGET, only copy it from
4121 memory and don't store it there again. */
4123 || (rtx_equal_p (temp
, target
)
4124 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4125 dont_store_target
= 1;
4126 temp
= copy_to_reg (temp
);
4128 dont_return_target
= 1;
4130 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
      /* If this is a scalar in a register that is stored in a wider mode
	 than the declared mode, compute the result into its declared mode
	 and then convert to the wider mode.  Our value is the computed
	 expression.  */
4136 rtx inner_target
= 0;
4138 /* If we don't want a value, we can do the conversion inside EXP,
4139 which will often result in some optimizations. Do the conversion
4140 in two steps: first change the signedness, if needed, then
4141 the extend. But don't do this if the type of EXP is a subtype
4142 of something else since then the conversion might involve
4143 more than just converting modes. */
4144 if ((want_value
& 1) == 0
4145 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4146 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4148 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4149 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4151 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4152 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4154 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4155 (GET_MODE (SUBREG_REG (target
)),
4156 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4159 inner_target
= SUBREG_REG (target
);
4162 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
4163 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4165 /* If TEMP is a MEM and we want a result value, make the access
4166 now so it gets done only once. Strictly speaking, this is
4167 only necessary if the MEM is volatile, or if the address
4168 overlaps TARGET. But not performing the load twice also
4169 reduces the amount of rtl we generate and then have to CSE. */
4170 if (GET_CODE (temp
) == MEM
&& (want_value
& 1) != 0)
4171 temp
= copy_to_reg (temp
);
4173 /* If TEMP is a VOIDmode constant, use convert_modes to make
4174 sure that we properly convert it. */
4175 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4177 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4178 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4179 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4180 GET_MODE (target
), temp
,
4181 SUBREG_PROMOTED_UNSIGNED_P (target
));
4184 convert_move (SUBREG_REG (target
), temp
,
4185 SUBREG_PROMOTED_UNSIGNED_P (target
));
4187 /* If we promoted a constant, change the mode back down to match
4188 target. Otherwise, the caller might get confused by a result whose
4189 mode is larger than expected. */
4191 if ((want_value
& 1) != 0 && GET_MODE (temp
) != GET_MODE (target
))
4193 if (GET_MODE (temp
) != VOIDmode
)
4195 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4196 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4197 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4198 SUBREG_PROMOTED_UNSIGNED_P (target
));
4201 temp
= convert_modes (GET_MODE (target
),
4202 GET_MODE (SUBREG_REG (target
)),
4203 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4206 return want_value
& 1 ? temp
: NULL_RTX
;
4210 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4211 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4212 /* Return TARGET if it's a specified hardware register.
4213 If TARGET is a volatile mem ref, either return TARGET
4214 or return a reg copied *from* TARGET; ANSI requires this.
4216 Otherwise, if TEMP is not TARGET, return TEMP
4217 if it is constant (for efficiency),
4218 or if we really want the correct value. */
4219 if (!(target
&& GET_CODE (target
) == REG
4220 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4221 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4222 && ! rtx_equal_p (temp
, target
)
4223 && (CONSTANT_P (temp
) || (want_value
& 1) != 0))
4224 dont_return_target
= 1;
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
4231 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4232 && TREE_CODE (exp
) != ERROR_MARK
4233 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4234 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4235 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */
4251 if ((! rtx_equal_p (temp
, target
)
4252 || (temp
!= target
&& (side_effects_p (temp
)
4253 || side_effects_p (target
))))
4254 && TREE_CODE (exp
) != ERROR_MARK
4255 && ! dont_store_target
4256 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4257 but TARGET is not valid memory reference, TEMP will differ
4258 from TARGET although it is really the same location. */
4259 && (TREE_CODE_CLASS (TREE_CODE (exp
)) != 'd'
4260 || target
!= DECL_RTL_IF_SET (exp
))
4261 /* If there's nothing to copy, don't bother. Don't call expr_size
4262 unless necessary, because some front-ends (C++) expr_size-hook
4263 aborts on objects that are not supposed to be bit-copied or
4265 && expr_size (exp
) != const0_rtx
)
4267 target
= protect_from_queue (target
, 1);
4268 if (GET_MODE (temp
) != GET_MODE (target
)
4269 && GET_MODE (temp
) != VOIDmode
)
4271 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4272 if (dont_return_target
)
4274 /* In this case, we will return TEMP,
4275 so make sure it has the proper mode.
4276 But don't forget to store the value into TARGET. */
4277 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4278 emit_move_insn (target
, temp
);
4281 convert_move (target
, temp
, unsignedp
);
4284 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
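	  /* For example (with a C front end), for
	       char buf[8] = "hi";
	     the STRING_CST supplies 3 bytes ("hi" plus the terminating NUL);
	     those bytes are block-copied below and the remaining 5 bytes of
	     BUF are then cleared.  */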
4290 rtx size
= expr_size (exp
);
4292 if (GET_CODE (size
) == CONST_INT
4293 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4294 emit_block_move (target
, temp
, size
,
4296 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4299 /* Compute the size of the data to copy from the string. */
4301 = size_binop (MIN_EXPR
,
4302 make_tree (sizetype
, size
),
4303 size_int (TREE_STRING_LENGTH (exp
)));
4305 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4307 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4310 /* Copy that much. */
4311 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
,
4312 TREE_UNSIGNED (sizetype
));
4313 emit_block_move (target
, temp
, copy_size_rtx
,
4315 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4317 /* Figure out how much is left in TARGET that we have to clear.
4318 Do all calculations in ptr_mode. */
4319 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4321 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4322 target
= adjust_address (target
, BLKmode
,
4323 INTVAL (copy_size_rtx
));
4327 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4328 copy_size_rtx
, NULL_RTX
, 0,
4331 #ifdef POINTERS_EXTEND_UNSIGNED
4332 if (GET_MODE (copy_size_rtx
) != Pmode
)
4333 copy_size_rtx
= convert_to_mode (Pmode
, copy_size_rtx
,
4334 TREE_UNSIGNED (sizetype
));
4337 target
= offset_address (target
, copy_size_rtx
,
4338 highest_pow2_factor (copy_size
));
4339 label
= gen_label_rtx ();
4340 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4341 GET_MODE (size
), 0, label
);
4344 if (size
!= const0_rtx
)
4345 clear_storage (target
, size
);
4351 /* Handle calls that return values in multiple non-contiguous locations.
4352 The Irix 6 ABI has examples of this. */
4353 else if (GET_CODE (target
) == PARALLEL
)
4354 emit_group_load (target
, temp
, TREE_TYPE (exp
),
4355 int_size_in_bytes (TREE_TYPE (exp
)));
4356 else if (GET_MODE (temp
) == BLKmode
)
4357 emit_block_move (target
, temp
, expr_size (exp
),
4359 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4361 emit_move_insn (target
, temp
);
4364 /* If we don't want a value, return NULL_RTX. */
4365 if ((want_value
& 1) == 0)
4368 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4369 ??? The latter test doesn't seem to make sense. */
4370 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4373 /* Return TARGET itself if it is a hard register. */
4374 else if ((want_value
& 1) != 0
4375 && GET_MODE (target
) != BLKmode
4376 && ! (GET_CODE (target
) == REG
4377 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4378 return copy_to_reg (target
);
/* Return 1 if EXP just contains zeros.  FIXME merge with initializer_zerop.  */

static int
is_zeros_p (tree exp)
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
	   elt = TREE_CHAIN (elt))
	if (!is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
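/* Example: for a constructor such as { 0, 0, 0, 5 } we count zeros == 3
   and elts == 4; since 4 * 3 >= 3 * 4, the constructor is considered
   mostly zero, and callers will prefer to clear the whole object first
   and then store only the nonzero elements.  */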
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
4475 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
4476 HOST_WIDE_INT bitpos
, enum machine_mode mode
,
4477 tree exp
, tree type
, int cleared
, int alias_set
)
4479 if (TREE_CODE (exp
) == CONSTRUCTOR
4480 && bitpos
% BITS_PER_UNIT
== 0
4481 /* If we have a nonzero bitpos for a register target, then we just
4482 let store_field do the bitfield handling. This is unlikely to
4483 generate unnecessary clear instructions anyways. */
4484 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4486 if (GET_CODE (target
) == MEM
)
4488 = adjust_address (target
,
4489 GET_MODE (target
) == BLKmode
4491 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4492 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4495 /* Update the alias set, if required. */
4496 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4497 && MEM_ALIAS_SET (target
) != 0)
4499 target
= copy_rtx (target
);
4500 set_mem_alias_set (target
, alias_set
);
4503 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
4506 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
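/* As a source-level illustration (C99 syntax, hypothetical): for
   "struct s x = { .a = 1 };" where struct s has several members, the code
   below typically clears the whole object first (missing fields must read
   as zero) and then stores only the explicitly initialized field; CLEARED
   records that the clearing already happened so that nested constructors
   do not clear their substructures again.  */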
4519 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
4521 tree type
= TREE_TYPE (exp
);
4522 #ifdef WORD_REGISTER_OPERATIONS
4523 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4526 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4527 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4531 /* If size is zero or the target is already cleared, do nothing. */
4532 if (size
== 0 || cleared
)
4534 /* We either clear the aggregate or indicate the value is dead. */
4535 else if ((TREE_CODE (type
) == UNION_TYPE
4536 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4537 && ! CONSTRUCTOR_ELTS (exp
))
4538 /* If the constructor is empty, clear the union. */
4540 clear_storage (target
, expr_size (exp
));
4544 /* If we are building a static constructor into a register,
4545 set the initial value as zero so we can fold the value into
4546 a constant. But if more than one register is involved,
4547 this probably loses. */
4548 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4549 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4551 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4555 /* If the constructor has fewer fields than the structure
4556 or if we are initializing the structure to mostly zeros,
4557 clear the whole structure first. Don't do this if TARGET is a
4558 register whose mode size isn't equal to SIZE since clear_storage
4559 can't handle this case. */
4560 else if (((list_length (CONSTRUCTOR_ELTS (exp
)) != fields_length (type
))
4561 || mostly_zeros_p (exp
))
4562 && (GET_CODE (target
) != REG
4563 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4566 rtx xtarget
= target
;
4568 if (readonly_fields_p (type
))
4570 xtarget
= copy_rtx (xtarget
);
4571 RTX_UNCHANGING_P (xtarget
) = 1;
4574 clear_storage (xtarget
, GEN_INT (size
));
4579 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4581 /* Store each element of the constructor into
4582 the corresponding field of TARGET. */
4584 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4586 tree field
= TREE_PURPOSE (elt
);
4587 tree value
= TREE_VALUE (elt
);
4588 enum machine_mode mode
;
4589 HOST_WIDE_INT bitsize
;
4590 HOST_WIDE_INT bitpos
= 0;
4592 rtx to_rtx
= target
;
4594 /* Just ignore missing fields.
4595 We cleared the whole structure, above,
4596 if any fields are missing. */
4600 if (cleared
&& is_zeros_p (value
))
4603 if (host_integerp (DECL_SIZE (field
), 1))
4604 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4608 mode
= DECL_MODE (field
);
4609 if (DECL_BIT_FIELD (field
))
4612 offset
= DECL_FIELD_OFFSET (field
);
4613 if (host_integerp (offset
, 0)
4614 && host_integerp (bit_position (field
), 0))
4616 bitpos
= int_bit_position (field
);
4620 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4626 if (CONTAINS_PLACEHOLDER_P (offset
))
4627 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4628 offset
, make_tree (TREE_TYPE (exp
), target
));
4630 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4631 if (GET_CODE (to_rtx
) != MEM
)
4634 #ifdef POINTERS_EXTEND_UNSIGNED
4635 if (GET_MODE (offset_rtx
) != Pmode
)
4636 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
4638 if (GET_MODE (offset_rtx
) != ptr_mode
)
4639 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4642 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4643 highest_pow2_factor (offset
));
4646 if (TREE_READONLY (field
))
4648 if (GET_CODE (to_rtx
) == MEM
)
4649 to_rtx
= copy_rtx (to_rtx
);
4651 RTX_UNCHANGING_P (to_rtx
) = 1;
4654 #ifdef WORD_REGISTER_OPERATIONS
4655 /* If this initializes a field that is smaller than a word, at the
4656 start of a word, try to widen it to a full word.
4657 This special case allows us to output C++ member function
4658 initializations in a form that the optimizers can understand. */
4659 if (GET_CODE (target
) == REG
4660 && bitsize
< BITS_PER_WORD
4661 && bitpos
% BITS_PER_WORD
== 0
4662 && GET_MODE_CLASS (mode
) == MODE_INT
4663 && TREE_CODE (value
) == INTEGER_CST
4665 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4667 tree type
= TREE_TYPE (value
);
4669 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4671 type
= (*lang_hooks
.types
.type_for_size
)
4672 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4673 value
= convert (type
, value
);
4676 if (BYTES_BIG_ENDIAN
)
4678 = fold (build (LSHIFT_EXPR
, type
, value
,
4679 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4680 bitsize
= BITS_PER_WORD
;
4685 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4686 && DECL_NONADDRESSABLE_P (field
))
4688 to_rtx
= copy_rtx (to_rtx
);
4689 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4692 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4693 value
, type
, cleared
,
4694 get_alias_set (TREE_TYPE (field
)));
4697 else if (TREE_CODE (type
) == ARRAY_TYPE
4698 || TREE_CODE (type
) == VECTOR_TYPE
)
4703 tree domain
= TYPE_DOMAIN (type
);
4704 tree elttype
= TREE_TYPE (type
);
4706 HOST_WIDE_INT minelt
= 0;
4707 HOST_WIDE_INT maxelt
= 0;
4709 /* Vectors are like arrays, but the domain is stored via an array
4711 if (TREE_CODE (type
) == VECTOR_TYPE
)
4713 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4714 the same field as TYPE_DOMAIN, we are not guaranteed that
4716 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
4717 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4720 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4721 && TYPE_MAX_VALUE (domain
)
4722 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4723 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4725 /* If we have constant bounds for the range of the type, get them. */
4728 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4729 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4732 /* If the constructor has fewer elements than the array,
4733 clear the whole array first. Similarly if this is
4734 static constructor of a non-BLKmode object. */
4735 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4739 HOST_WIDE_INT count
= 0, zero_count
= 0;
4740 need_to_clear
= ! const_bounds_p
;
4742 /* This loop is a more accurate version of the loop in
4743 mostly_zeros_p (it handles RANGE_EXPR in an index).
4744 It is also needed to check for missing elements. */
4745 for (elt
= CONSTRUCTOR_ELTS (exp
);
4746 elt
!= NULL_TREE
&& ! need_to_clear
;
4747 elt
= TREE_CHAIN (elt
))
4749 tree index
= TREE_PURPOSE (elt
);
4750 HOST_WIDE_INT this_node_count
;
4752 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4754 tree lo_index
= TREE_OPERAND (index
, 0);
4755 tree hi_index
= TREE_OPERAND (index
, 1);
4757 if (! host_integerp (lo_index
, 1)
4758 || ! host_integerp (hi_index
, 1))
4764 this_node_count
= (tree_low_cst (hi_index
, 1)
4765 - tree_low_cst (lo_index
, 1) + 1);
4768 this_node_count
= 1;
4770 count
+= this_node_count
;
4771 if (mostly_zeros_p (TREE_VALUE (elt
)))
4772 zero_count
+= this_node_count
;
4775 /* Clear the entire array first if there are any missing elements,
4776 or if the incidence of zero elements is >= 75%. */
4778 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4782 if (need_to_clear
&& size
> 0)
4787 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4789 clear_storage (target
, GEN_INT (size
));
4793 else if (REG_P (target
))
4794 /* Inform later passes that the old value is dead. */
4795 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4797 /* Store each element of the constructor into
4798 the corresponding element of TARGET, determined
4799 by counting the elements. */
4800 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4802 elt
= TREE_CHAIN (elt
), i
++)
4804 enum machine_mode mode
;
4805 HOST_WIDE_INT bitsize
;
4806 HOST_WIDE_INT bitpos
;
4808 tree value
= TREE_VALUE (elt
);
4809 tree index
= TREE_PURPOSE (elt
);
4810 rtx xtarget
= target
;
4812 if (cleared
&& is_zeros_p (value
))
4815 unsignedp
= TREE_UNSIGNED (elttype
);
4816 mode
= TYPE_MODE (elttype
);
4817 if (mode
== BLKmode
)
4818 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4819 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4822 bitsize
= GET_MODE_BITSIZE (mode
);
4824 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4826 tree lo_index
= TREE_OPERAND (index
, 0);
4827 tree hi_index
= TREE_OPERAND (index
, 1);
4828 rtx index_r
, pos_rtx
, loop_end
;
4829 struct nesting
*loop
;
4830 HOST_WIDE_INT lo
, hi
, count
;
4833 /* If the range is constant and "small", unroll the loop. */
4835 && host_integerp (lo_index
, 0)
4836 && host_integerp (hi_index
, 0)
4837 && (lo
= tree_low_cst (lo_index
, 0),
4838 hi
= tree_low_cst (hi_index
, 0),
4839 count
= hi
- lo
+ 1,
4840 (GET_CODE (target
) != MEM
4842 || (host_integerp (TYPE_SIZE (elttype
), 1)
4843 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4846 lo
-= minelt
; hi
-= minelt
;
4847 for (; lo
<= hi
; lo
++)
4849 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4851 if (GET_CODE (target
) == MEM
4852 && !MEM_KEEP_ALIAS_SET_P (target
)
4853 && TREE_CODE (type
) == ARRAY_TYPE
4854 && TYPE_NONALIASED_COMPONENT (type
))
4856 target
= copy_rtx (target
);
4857 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4860 store_constructor_field
4861 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4862 get_alias_set (elttype
));
4867 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4868 loop_end
= gen_label_rtx ();
4870 unsignedp
= TREE_UNSIGNED (domain
);
4872 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4875 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4877 SET_DECL_RTL (index
, index_r
);
4878 if (TREE_CODE (value
) == SAVE_EXPR
4879 && SAVE_EXPR_RTL (value
) == 0)
4881 /* Make sure value gets expanded once before the
4883 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4886 store_expr (lo_index
, index_r
, 0);
4887 loop
= expand_start_loop (0);
4889 /* Assign value to element index. */
4891 = convert (ssizetype
,
4892 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4893 index
, TYPE_MIN_VALUE (domain
))));
4894 position
= size_binop (MULT_EXPR
, position
,
4896 TYPE_SIZE_UNIT (elttype
)));
4898 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4899 xtarget
= offset_address (target
, pos_rtx
,
4900 highest_pow2_factor (position
));
4901 xtarget
= adjust_address (xtarget
, mode
, 0);
4902 if (TREE_CODE (value
) == CONSTRUCTOR
)
4903 store_constructor (value
, xtarget
, cleared
,
4904 bitsize
/ BITS_PER_UNIT
);
4906 store_expr (value
, xtarget
, 0);
4908 expand_exit_loop_if_false (loop
,
4909 build (LT_EXPR
, integer_type_node
,
4912 expand_increment (build (PREINCREMENT_EXPR
,
4914 index
, integer_one_node
), 0, 0);
4916 emit_label (loop_end
);
4919 else if ((index
!= 0 && ! host_integerp (index
, 0))
4920 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4925 index
= ssize_int (1);
4928 index
= convert (ssizetype
,
4929 fold (build (MINUS_EXPR
, index
,
4930 TYPE_MIN_VALUE (domain
))));
4932 position
= size_binop (MULT_EXPR
, index
,
4934 TYPE_SIZE_UNIT (elttype
)));
4935 xtarget
= offset_address (target
,
4936 expand_expr (position
, 0, VOIDmode
, 0),
4937 highest_pow2_factor (position
));
4938 xtarget
= adjust_address (xtarget
, mode
, 0);
4939 store_expr (value
, xtarget
, 0);
4944 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4945 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4947 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4949 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
4950 && TREE_CODE (type
) == ARRAY_TYPE
4951 && TYPE_NONALIASED_COMPONENT (type
))
4953 target
= copy_rtx (target
);
4954 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4957 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4958 type
, cleared
, get_alias_set (elttype
));
4964 /* Set constructor assignments. */
4965 else if (TREE_CODE (type
) == SET_TYPE
)
4967 tree elt
= CONSTRUCTOR_ELTS (exp
);
4968 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4969 tree domain
= TYPE_DOMAIN (type
);
4970 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and then
	 set the bits we want.  */
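      /* Front-end dependent illustration: for a set constructor such as
	 [1..4, LO..HI], the constant range 1..4 is assembled into host
	 words by get_set_constructor_bits and stored directly, while the
	 variable range LO..HI is "or"ed in afterwards through the
	 memset/setbits library calls emitted below.  */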
4982 /* Check for all zeros. */
4983 if (elt
== NULL_TREE
&& size
> 0)
4986 clear_storage (target
, GEN_INT (size
));
4990 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4991 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4992 bitlength
= size_binop (PLUS_EXPR
,
4993 size_diffop (domain_max
, domain_min
),
4996 nbits
= tree_low_cst (bitlength
, 1);
4998 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4999 are "complicated" (more than one range), initialize (the
5000 constant parts) by copying from a constant. */
5001 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5002 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5004 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5005 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5006 char *bit_buffer
= alloca (nbits
);
5007 HOST_WIDE_INT word
= 0;
5008 unsigned int bit_pos
= 0;
5009 unsigned int ibit
= 0;
5010 unsigned int offset
= 0; /* In bytes from beginning of set. */
5012 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5015 if (bit_buffer
[ibit
])
5017 if (BYTES_BIG_ENDIAN
)
5018 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5020 word
|= 1 << bit_pos
;
5024 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5026 if (word
!= 0 || ! cleared
)
5028 rtx datum
= GEN_INT (word
);
5031 /* The assumption here is that it is safe to use
5032 XEXP if the set is multi-word, but not if
5033 it's single-word. */
5034 if (GET_CODE (target
) == MEM
)
5035 to_rtx
= adjust_address (target
, mode
, offset
);
5036 else if (offset
== 0)
5040 emit_move_insn (to_rtx
, datum
);
5047 offset
+= set_word_size
/ BITS_PER_UNIT
;
5052 /* Don't bother clearing storage if the set is all ones. */
5053 if (TREE_CHAIN (elt
) != NULL_TREE
5054 || (TREE_PURPOSE (elt
) == NULL_TREE
5056 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5057 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5058 || (tree_low_cst (TREE_VALUE (elt
), 0)
5059 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5060 != (HOST_WIDE_INT
) nbits
))))
5061 clear_storage (target
, expr_size (exp
));
5063 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5065 /* Start of range of element or NULL. */
5066 tree startbit
= TREE_PURPOSE (elt
);
5067 /* End of range of element, or element value. */
5068 tree endbit
= TREE_VALUE (elt
);
5069 HOST_WIDE_INT startb
, endb
;
5070 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5072 bitlength_rtx
= expand_expr (bitlength
,
5073 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5075 /* Handle non-range tuple element like [ expr ]. */
5076 if (startbit
== NULL_TREE
)
5078 startbit
= save_expr (endbit
);
5082 startbit
= convert (sizetype
, startbit
);
5083 endbit
= convert (sizetype
, endbit
);
5084 if (! integer_zerop (domain_min
))
5086 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5087 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5089 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5090 EXPAND_CONST_ADDRESS
);
5091 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5092 EXPAND_CONST_ADDRESS
);
5098 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
5099 (GET_MODE (target
), 0),
5102 emit_move_insn (targetx
, target
);
5105 else if (GET_CODE (target
) == MEM
)
5110 /* Optimization: If startbit and endbit are constants divisible
5111 by BITS_PER_UNIT, call memset instead. */
5112 if (TARGET_MEM_FUNCTIONS
5113 && TREE_CODE (startbit
) == INTEGER_CST
5114 && TREE_CODE (endbit
) == INTEGER_CST
5115 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5116 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5118 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5120 plus_constant (XEXP (targetx
, 0),
5121 startb
/ BITS_PER_UNIT
),
5123 constm1_rtx
, TYPE_MODE (integer_type_node
),
5124 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5125 TYPE_MODE (sizetype
));
5128 emit_library_call (setbits_libfunc
, LCT_NORMAL
,
5129 VOIDmode
, 4, XEXP (targetx
, 0),
5130 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5131 startbit_rtx
, TYPE_MODE (sizetype
),
5132 endbit_rtx
, TYPE_MODE (sizetype
));
5135 emit_move_insn (target
, targetx
);
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
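/* For example, an assignment to a bit-field member declared "int x : 3;"
   reaches this function with BITSIZE == 3, BITPOS giving the field's bit
   offset within the containing object, and MODE == VOIDmode, which forces
   the store_bit_field path below.  (Illustrative values only.)  */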
5162 store_field (rtx target
, HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
5163 enum machine_mode mode
, tree exp
, enum machine_mode value_mode
,
5164 int unsignedp
, tree type
, int alias_set
)
5166 HOST_WIDE_INT width_mask
= 0;
5168 if (TREE_CODE (exp
) == ERROR_MARK
)
5171 /* If we have nothing to store, do nothing unless the expression has
5174 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5175 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5176 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */
5191 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5193 rtx object
= assign_temp (type
, 0, 1, 1);
5194 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5196 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5197 emit_move_insn (object
, target
);
5199 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5202 emit_move_insn (target
, object
);
5204 /* We want to return the BLKmode version of the data. */
5208 if (GET_CODE (target
) == CONCAT
)
5210 /* We're storing into a struct containing a single __complex. */
5214 return store_expr (exp
, target
, 0);
5217 /* If the structure is in a register or if the component
5218 is a bit field, we cannot use addressing to access it.
5219 Use bit-field techniques or SUBREG to store in it. */
5221 if (mode
== VOIDmode
5222 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5223 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5224 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5225 || GET_CODE (target
) == REG
5226 || GET_CODE (target
) == SUBREG
5227 /* If the field isn't aligned enough to store as an ordinary memref,
5228 store it as a bit field. */
5230 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5231 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5232 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5233 || (bitpos
% BITS_PER_UNIT
!= 0)))
5234 /* If the RHS and field are a constant size and the size of the
5235 RHS isn't the same size as the bitfield, we must use bitfield
5238 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5239 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5241 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5243 /* If BITSIZE is narrower than the size of the type of EXP
5244 we will be narrowing TEMP. Normally, what's wanted are the
5245 low-order bits. However, if EXP's type is a record and this is
5246 big-endian machine, we want the upper BITSIZE bits. */
5247 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5248 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5249 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5250 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5251 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5255 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5257 if (mode
!= VOIDmode
&& mode
!= BLKmode
5258 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5259 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5261 /* If the modes of TARGET and TEMP are both BLKmode, both
5262 must be in memory and BITPOS must be aligned on a byte
5263 boundary. If so, we simply do a block copy. */
5264 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5266 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5267 || bitpos
% BITS_PER_UNIT
!= 0)
5270 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5271 emit_block_move (target
, temp
,
5272 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5276 return value_mode
== VOIDmode
? const0_rtx
: target
;
5279 /* Store the value in the bitfield. */
5280 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5281 int_size_in_bytes (type
));
5283 if (value_mode
!= VOIDmode
)
5285 /* The caller wants an rtx for the value.
5286 If possible, avoid refetching from the bitfield itself. */
5288 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5291 enum machine_mode tmode
;
5293 tmode
= GET_MODE (temp
);
5294 if (tmode
== VOIDmode
)
5298 return expand_and (tmode
, temp
,
5299 gen_int_mode (width_mask
, tmode
),
5302 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5303 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5304 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5307 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5308 NULL_RTX
, value_mode
, VOIDmode
,
5309 int_size_in_bytes (type
));
5315 rtx addr
= XEXP (target
, 0);
5316 rtx to_rtx
= target
;
5318 /* If a value is wanted, it must be the lhs;
5319 so make the address stable for multiple use. */
5321 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5322 && ! CONSTANT_ADDRESS_P (addr
)
5323 /* A frame-pointer reference is already stable. */
5324 && ! (GET_CODE (addr
) == PLUS
5325 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5326 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5327 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5328 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5330 /* Now build a reference to just the desired component. */
5332 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5334 if (to_rtx
== target
)
5335 to_rtx
= copy_rtx (to_rtx
);
5337 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5338 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5339 set_mem_alias_set (to_rtx
, alias_set
);
5341 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
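/* For example, for the reference "a.b[i].c" this function peels off the
   COMPONENT_REFs and the ARRAY_REF, returns the outermost object A,
   accumulates the constant part of the position into *PBITPOS, and, since
   the index I is variable, returns the remaining byte offset as a tree in
   *POFFSET.  (Source-level illustration only.)  */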
5368 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5369 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5370 enum machine_mode
*pmode
, int *punsignedp
,
5374 enum machine_mode mode
= VOIDmode
;
5375 tree offset
= size_zero_node
;
5376 tree bit_offset
= bitsize_zero_node
;
5377 tree placeholder_ptr
= 0;
5380 /* First get the mode, signedness, and size. We do this from just the
5381 outermost expression. */
5382 if (TREE_CODE (exp
) == COMPONENT_REF
)
5384 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5385 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5386 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5388 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5390 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5392 size_tree
= TREE_OPERAND (exp
, 1);
5393 *punsignedp
= TREE_UNSIGNED (exp
);
5397 mode
= TYPE_MODE (TREE_TYPE (exp
));
5398 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5400 if (mode
== BLKmode
)
5401 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5403 *pbitsize
= GET_MODE_BITSIZE (mode
);
5408 if (! host_integerp (size_tree
, 1))
5409 mode
= BLKmode
, *pbitsize
= -1;
5411 *pbitsize
= tree_low_cst (size_tree
, 1);
5414 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5415 and find the ultimate containing object. */
5418 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5419 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5420 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5422 tree field
= TREE_OPERAND (exp
, 1);
5423 tree this_offset
= DECL_FIELD_OFFSET (field
);
5425 /* If this field hasn't been filled in yet, don't go
5426 past it. This should only happen when folding expressions
5427 made during type construction. */
5428 if (this_offset
== 0)
5430 else if (CONTAINS_PLACEHOLDER_P (this_offset
))
5431 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5433 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5434 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5435 DECL_FIELD_BIT_OFFSET (field
));
5437 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5440 else if (TREE_CODE (exp
) == ARRAY_REF
5441 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5443 tree index
= TREE_OPERAND (exp
, 1);
5444 tree array
= TREE_OPERAND (exp
, 0);
5445 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5446 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5447 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5449 /* We assume all arrays have sizes that are a multiple of a byte.
5450 First subtract the lower bound, if any, in the type of the
5451 index, then convert to sizetype and multiply by the size of the
5453 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5454 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5457 /* If the index has a self-referential type, pass it to a
5458 WITH_RECORD_EXPR; if the component size is, pass our
5459 component to one. */
5460 if (CONTAINS_PLACEHOLDER_P (index
))
5461 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5462 if (CONTAINS_PLACEHOLDER_P (unit_size
))
5463 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5465 offset
= size_binop (PLUS_EXPR
, offset
,
5466 size_binop (MULT_EXPR
,
5467 convert (sizetype
, index
),
5471 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5473 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5475 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5476 We might have been called from tree optimization where we
5477 haven't set up an object yet. */
5486 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5487 conversions that don't change the mode, and all view conversions
5488 except those that need to "step up" the alignment. */
5489 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5490 && ! (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
5491 && ! ((TYPE_ALIGN (TREE_TYPE (exp
))
5492 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5494 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5495 < BIGGEST_ALIGNMENT
)
5496 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
5497 || TYPE_ALIGN_OK (TREE_TYPE
5498 (TREE_OPERAND (exp
, 0))))))
5499 && ! ((TREE_CODE (exp
) == NOP_EXPR
5500 || TREE_CODE (exp
) == CONVERT_EXPR
)
5501 && (TYPE_MODE (TREE_TYPE (exp
))
5502 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5505 /* If any reference in the chain is volatile, the effect is volatile. */
5506 if (TREE_THIS_VOLATILE (exp
))
5509 exp
= TREE_OPERAND (exp
, 0);
5512 /* If OFFSET is constant, see if we can return the whole thing as a
5513 constant bit position. Otherwise, split it up. */
5514 if (host_integerp (offset
, 0)
5515 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5517 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5518 && host_integerp (tem
, 0))
5519 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5521 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    /* ??? Sure they are handled, but get_inner_reference may return
       a different PBITSIZE, depending upon whether the expression is
       wrapped up in a NOP_EXPR or not, e.g. for bitfields.  */
    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
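/* For example, given an address computation such as
     (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61))
   force_operand emits the multiply and the add as real insns and returns a
   pseudo register holding the result, which can then be used wherever a
   simple operand is required.  (Register numbers are illustrative.)  */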
5563 force_operand (rtx value
, rtx target
)
5566 /* Use subtarget as the target for operand 0 of a binary operation. */
5567 rtx subtarget
= get_subtarget (target
);
5568 enum rtx_code code
= GET_CODE (value
);
5570 /* Check for a PIC address load. */
5571 if ((code
== PLUS
|| code
== MINUS
)
5572 && XEXP (value
, 0) == pic_offset_table_rtx
5573 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5574 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5575 || GET_CODE (XEXP (value
, 1)) == CONST
))
5578 subtarget
= gen_reg_rtx (GET_MODE (value
));
5579 emit_move_insn (subtarget
, value
);
5583 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5586 target
= gen_reg_rtx (GET_MODE (value
));
5587 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5588 code
== ZERO_EXTEND
);
5592 if (GET_RTX_CLASS (code
) == '2' || GET_RTX_CLASS (code
) == 'c')
5594 op2
= XEXP (value
, 1);
5595 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5597 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5600 op2
= negate_rtx (GET_MODE (value
), op2
);
5603 /* Check for an addition with OP2 a constant integer and our first
5604 operand a PLUS of a virtual register and something else. In that
5605 case, we want to emit the sum of the virtual register and the
5606 constant first and then add the other value. This allows virtual
5607 register instantiation to simply modify the constant rather than
5608 creating another one around this addition. */
5609 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5610 && GET_CODE (XEXP (value
, 0)) == PLUS
5611 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5612 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5613 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5615 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5616 XEXP (XEXP (value
, 0), 0), op2
,
5617 subtarget
, 0, OPTAB_LIB_WIDEN
);
5618 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5619 force_operand (XEXP (XEXP (value
,
5621 target
, 0, OPTAB_LIB_WIDEN
);
5624 op1
= force_operand (XEXP (value
, 0), subtarget
);
5625 op2
= force_operand (op2
, NULL_RTX
);
5629 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5631 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5632 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5633 target
, 1, OPTAB_LIB_WIDEN
);
5635 return expand_divmod (0,
5636 FLOAT_MODE_P (GET_MODE (value
))
5637 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5638 GET_MODE (value
), op1
, op2
, target
, 0);
5641 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5645 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5649 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5653 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5654 target
, 0, OPTAB_LIB_WIDEN
);
5657 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5658 target
, 1, OPTAB_LIB_WIDEN
);
5661 if (GET_RTX_CLASS (code
) == '1')
5663 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5664 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5667 #ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
5670 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5671 && (GET_MODE_SIZE (GET_MODE (value
))
5672 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5674 = simplify_gen_subreg (GET_MODE (value
),
5675 force_reg (GET_MODE (SUBREG_REG (value
)),
5676 force_operand (SUBREG_REG (value
),
5678 GET_MODE (SUBREG_REG (value
)),
5679 SUBREG_BYTE (value
));
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
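/* Example of use: when expanding an assignment whose right-hand side might
   mention the destination, the expander can ask safe_from_p whether the
   rtx chosen for the destination could be referenced by the source; a zero
   answer just means "not proven safe", and the caller then computes the
   value into a temporary first.  */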
5694 safe_from_p (rtx x
, tree exp
, int top_p
)
5698 static tree save_expr_list
;
5701 /* If EXP has varying size, we MUST use a target since we currently
5702 have no way of allocating temporaries of variable size
5703 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5704 So we assume here that something at a higher level has prevented a
5705 clash. This is somewhat bogus, but the best we can do. Only
5706 do this when X is BLKmode and when we are at the top level. */
5707 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5708 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5709 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5710 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5711 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5713 && GET_MODE (x
) == BLKmode
)
5714 /* If X is in the outgoing argument area, it is always safe. */
5715 || (GET_CODE (x
) == MEM
5716 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5717 || (GET_CODE (XEXP (x
, 0)) == PLUS
5718 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5721 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5722 find the underlying pseudo. */
5723 if (GET_CODE (x
) == SUBREG
)
5726 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */
5746 rtn
= safe_from_p (x
, exp
, 0);
5748 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5749 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5754 /* Now look at our tree code and possibly recurse. */
5755 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5758 exp_rtl
= DECL_RTL_IF_SET (exp
);
5765 if (TREE_CODE (exp
) == TREE_LIST
)
5769 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5771 exp
= TREE_CHAIN (exp
);
5774 if (TREE_CODE (exp
) != TREE_LIST
)
5775 return safe_from_p (x
, exp
, 0);
5778 else if (TREE_CODE (exp
) == ERROR_MARK
)
5779 return 1; /* An already-visited SAVE_EXPR? */
5785 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
5790 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5794 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5795 the expression. If it is set, we conflict iff we are that rtx or
5796 both are in memory. Otherwise, we check all operands of the
5797 expression recursively. */
5799 switch (TREE_CODE (exp
))
5802 /* If the operand is static or we are static, we can't conflict.
5803 Likewise if we don't conflict with the operand at all. */
5804 if (staticp (TREE_OPERAND (exp
, 0))
5805 || TREE_STATIC (exp
)
5806 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5809 /* Otherwise, the only way this can conflict is if we are taking
5810 the address of a DECL a that address if part of X, which is
5812 exp
= TREE_OPERAND (exp
, 0);
5815 if (!DECL_RTL_SET_P (exp
)
5816 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5819 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5824 if (GET_CODE (x
) == MEM
5825 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5826 get_alias_set (exp
)))
5831 /* Assume that the call will clobber all hard registers and
5833 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5834 || GET_CODE (x
) == MEM
)
5839 /* If a sequence exists, we would have to scan every instruction
5840 in the sequence to see if it was safe. This is probably not
5842 if (RTL_EXPR_SEQUENCE (exp
))
5845 exp_rtl
= RTL_EXPR_RTL (exp
);
5848 case WITH_CLEANUP_EXPR
:
5849 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
5852 case CLEANUP_POINT_EXPR
:
5853 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5856 exp_rtl
= SAVE_EXPR_RTL (exp
);
5860 /* If we've already scanned this, don't do it again. Otherwise,
5861 show we've scanned it and record for clearing the flag if we're
5863 if (TREE_PRIVATE (exp
))
5866 TREE_PRIVATE (exp
) = 1;
5867 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5869 TREE_PRIVATE (exp
) = 0;
5873 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5877 /* The only operand we look at is operand 1. The rest aren't
5878 part of the expression. */
5879 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5885 /* If we have an rtx, we do not need to scan our operands. */
5889 nops
= first_rtl_op (TREE_CODE (exp
));
5890 for (i
= 0; i
< nops
; i
++)
5891 if (TREE_OPERAND (exp
, i
) != 0
5892 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5895 /* If this is a language-specific tree code, it may require
5896 special handling. */
5897 if ((unsigned int) TREE_CODE (exp
)
5898 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5899 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
5903 /* If we have an rtl, find any enclosed object. Then see if we conflict
5907 if (GET_CODE (exp_rtl
) == SUBREG
)
5909 exp_rtl
= SUBREG_REG (exp_rtl
);
5910 if (GET_CODE (exp_rtl
) == REG
5911 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5915 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5916 are memory and they conflict. */
5917 return ! (rtx_equal_p (x
, exp_rtl
)
5918 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5919 && true_dependence (exp_rtl
, VOIDmode
, x
,
5920 rtx_addr_varies_p
)));
5923 /* If we reach here, it is safe. */
5927 /* Subroutine of expand_expr: return rtx if EXP is a
5928 variable or parameter; else return 0. */
5934 switch (TREE_CODE (exp
))
5938 return DECL_RTL (exp
);
#ifdef MAX_INTEGER_COMPUTATION_MODE

check_max_integer_computation_mode (tree exp)
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')

      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
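/* Illustration (hypothetical target, not from the original sources): if a
   port defined MAX_INTEGER_COMPUTATION_MODE as SImode, expanding a DImode
   addition would trip the "unsupported wide integer operation"
   internal_error above.  */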
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))

      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;

	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);

	  return c0 ? c0 : BIGGEST_ALIGNMENT;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))

	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:  case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

      return highest_pow2_factor (TREE_OPERAND (exp, 1));

      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);
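/* Worked example (illustrative only): for a division (x / 8) whose divisor
   is the power-of-two constant 8, and whose operand 0 is known to be a
   multiple of 32, the division case above yields MAX (1, 32 / 8) = 4.  */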
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (tree type, tree exp)
{
  unsigned HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
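/* For instance, if TYPE is aligned to 32 bits (so type_align is 4 bytes)
   and EXP's own factor is only 1, the result is MAX (1, 4) = 4.  */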
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

find_placeholder (tree exp, tree *plist)
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)

	    *plist = placeholder_expr;

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))

	    *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
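/* Note the two passes above: the first accepts an object whose tree walk
   reaches a reference of exactly NEED_TYPE; only if that fails does the
   second pass accept a pointer to NEED_TYPE, which is then dereferenced
   by building an INDIRECT_REF.  */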
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))

  if (operand_equal_p (exp0, exp1, 0))

      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);

      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
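/* A representative call, as used for binary operators later in this file:

       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			subtarget, &op0, &op1, 0);  */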
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.  */
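/* Typical use elsewhere in this file (shown for orientation):

       op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

   i.e. TARGET and TMODE are hints only, and the caller must be prepared to
   accept the value in whatever register or memory rtx comes back.  */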
expand_expr (tree exp, rtx target, enum machine_mode tmode,
	     enum expand_modifier modifier)
{
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);

  rtx subtarget, original_target;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)

      op0 = CONST0_RTX (tmode);

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));
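  /* IGNORE, as computed above, is nonzero when the caller passed const0_rtx
     as TARGET (the value is unwanted, e.g. an expression statement) or when
     the expression is a void-typed conversion or conditional.  */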
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

      if (! TREE_SIDE_EFFECTS (exp))

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)

	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)

	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);

      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (code == BIT_FIELD_REF)

	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
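      /* Note that for an ignored BIT_FIELD_REF all three operands are still
	 expanded with const0_rtx as the target, purely for their side
	 effects.  */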
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)

      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
      tree function = decl_function_context (exp);

      /* Labels in containing functions, or labels used from initializers,
	 must be forced.  */
      if (modifier == EXPAND_INITIALIZER
	  || (function != current_function_decl
	      && function != inline_function_decl
	      && function != 0))
	temp = force_label_rtx (exp);
      else
	temp = label_rtx (exp);

      temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
      if (function != current_function_decl
	  && function != inline_function_decl && function != 0)
	LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;

      if (!DECL_RTL_SET_P (exp))

	  error ("%Jprior parameter's size depends on '%D'", exp, exp);
	  return CONST0_RTX (mode);
      /* ... fall through ... */

      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);
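      /* Example of the situation handled above: an extern or static
	 declaration whose struct or array type was still incomplete at the
	 point of the declaration but has since been completed; its size is
	 laid out here on first use.  */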
      /* ... fall through ... */
6399 if (DECL_RTL (exp
) == 0)
6402 /* Ensure variable marked as used even if it doesn't go through
6403 a parser. If it hasn't be used yet, write out an external
6405 if (! TREE_USED (exp
))
6407 assemble_external (exp
);
6408 TREE_USED (exp
) = 1;
6411 /* Show we haven't gotten RTL for this yet. */
6414 /* Handle variables inherited from containing functions. */
6415 context
= decl_function_context (exp
);
6417 /* We treat inline_function_decl as an alias for the current function
6418 because that is the inline function whose vars, types, etc.
6419 are being merged into the current function.
6420 See expand_inline_function. */
6422 if (context
!= 0 && context
!= current_function_decl
6423 && context
!= inline_function_decl
6424 /* If var is static, we don't need a static chain to access it. */
6425 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6426 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6430 /* Mark as non-local and addressable. */
6431 DECL_NONLOCAL (exp
) = 1;
6432 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6434 (*lang_hooks
.mark_addressable
) (exp
);
6435 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6437 addr
= XEXP (DECL_RTL (exp
), 0);
6438 if (GET_CODE (addr
) == MEM
)
6440 = replace_equiv_address (addr
,
6441 fix_lexical_addr (XEXP (addr
, 0), exp
));
6443 addr
= fix_lexical_addr (addr
, exp
);
6445 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6448 /* This is the case of an array whose size is to be determined
6449 from its initializer, while the initializer is still being parsed.
6452 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6453 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6454 temp
= validize_mem (DECL_RTL (exp
));
6456 /* If DECL_RTL is memory, we are in the normal case and either
6457 the address is not valid or it is not a register and -fforce-addr
6458 is specified, get the address into a register. */
6460 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6461 && modifier
!= EXPAND_CONST_ADDRESS
6462 && modifier
!= EXPAND_SUM
6463 && modifier
!= EXPAND_INITIALIZER
6464 && (! memory_address_p (DECL_MODE (exp
),
6465 XEXP (DECL_RTL (exp
), 0))
6467 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6468 temp
= replace_equiv_address (DECL_RTL (exp
),
6469 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6471 /* If we got something, return it. But first, set the alignment
6472 if the address is a register. */
6475 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6476 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6481 /* If the mode of DECL_RTL does not match that of the decl, it
6482 must be a promoted value. We return a SUBREG of the wanted mode,
6483 but mark it so that we know that it was already extended. */
6485 if (GET_CODE (DECL_RTL (exp
)) == REG
6486 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6488 /* Get the signedness used for this variable. Ensure we get the
6489 same mode we got when the variable was declared. */
6490 if (GET_MODE (DECL_RTL (exp
))
6491 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6492 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6495 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6496 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6497 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6501 return DECL_RTL (exp
);
6504 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6505 TREE_INT_CST_HIGH (exp
), mode
);
6507 /* ??? If overflow is set, fold will have done an incomplete job,
6508 which can result in (plus xx (const_int 0)), which can get
6509 simplified by validate_replace_rtx during virtual register
6510 instantiation, which can result in unrecognizable insns.
6511 Avoid this by forcing all overflows into registers. */
6512 if (TREE_CONSTANT_OVERFLOW (exp
)
6513 && modifier
!= EXPAND_INITIALIZER
)
6514 temp
= force_reg (mode
, temp
);
6519 return const_vector_from_tree (exp
);
6522 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
6525 /* If optimized, generate immediate CONST_DOUBLE
6526 which will be turned into memory by reload if necessary.
6528 We used to force a register so that loop.c could see it. But
6529 this does not allow gen_* patterns to perform optimizations with
6530 the constants. It also produces two insns in cases like "x = 1.0;".
6531 On most machines, floating-point constants are not permitted in
6532 many insns, so we'd end up copying it to a register in any case.
6534 Now, we do the copying in expand_binop, if appropriate. */
6535 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6536 TYPE_MODE (TREE_TYPE (exp
)));
6539 /* Handle evaluating a complex constant in a CONCAT target. */
6540 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
6542 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
6545 rtarg
= XEXP (original_target
, 0);
6546 itarg
= XEXP (original_target
, 1);
6548 /* Move the real and imaginary parts separately. */
6549 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, 0);
6550 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, 0);
6553 emit_move_insn (rtarg
, op0
);
6555 emit_move_insn (itarg
, op1
);
6557 return original_target
;
6560 /* ... fall through ... */
6563 temp
= output_constant_def (exp
, 1);
6565 /* temp contains a constant address.
6566 On RISC machines where a constant address isn't valid,
6567 make some insns to get that address into a register. */
6568 if (modifier
!= EXPAND_CONST_ADDRESS
6569 && modifier
!= EXPAND_INITIALIZER
6570 && modifier
!= EXPAND_SUM
6571 && (! memory_address_p (mode
, XEXP (temp
, 0))
6572 || flag_force_addr
))
6573 return replace_equiv_address (temp
,
6574 copy_rtx (XEXP (temp
, 0)));
6577 case EXPR_WITH_FILE_LOCATION
:
6580 struct file_stack fs
;
6582 fs
.location
= input_location
;
6583 fs
.next
= expr_wfl_stack
;
6584 input_filename
= EXPR_WFL_FILENAME (exp
);
6585 input_line
= EXPR_WFL_LINENO (exp
);
6586 expr_wfl_stack
= &fs
;
6587 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6588 emit_line_note (input_location
);
6589 /* Possibly avoid switching back and forth here. */
6590 to_return
= expand_expr (EXPR_WFL_NODE (exp
),
6591 (ignore
? const0_rtx
: target
),
6593 if (expr_wfl_stack
!= &fs
)
6595 input_location
= fs
.location
;
6596 expr_wfl_stack
= fs
.next
;
6601 context
= decl_function_context (exp
);
6603 /* If this SAVE_EXPR was at global context, assume we are an
6604 initialization function and move it into our context. */
6606 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6608 /* We treat inline_function_decl as an alias for the current function
6609 because that is the inline function whose vars, types, etc.
6610 are being merged into the current function.
6611 See expand_inline_function. */
6612 if (context
== current_function_decl
|| context
== inline_function_decl
)
6615 /* If this is non-local, handle it. */
6618 /* The following call just exists to abort if the context is
6619 not of a containing function. */
6620 find_function_data (context
);
6622 temp
= SAVE_EXPR_RTL (exp
);
6623 if (temp
&& GET_CODE (temp
) == REG
)
6625 put_var_into_stack (exp
, /*rescan=*/true);
6626 temp
= SAVE_EXPR_RTL (exp
);
6628 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6631 replace_equiv_address (temp
,
6632 fix_lexical_addr (XEXP (temp
, 0), exp
));
6634 if (SAVE_EXPR_RTL (exp
) == 0)
6636 if (mode
== VOIDmode
)
6639 temp
= assign_temp (build_qualified_type (type
,
6641 | TYPE_QUAL_CONST
)),
6644 SAVE_EXPR_RTL (exp
) = temp
;
6645 if (!optimize
&& GET_CODE (temp
) == REG
)
6646 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6649 /* If the mode of TEMP does not match that of the expression, it
6650 must be a promoted value. We pass store_expr a SUBREG of the
6651 wanted mode but mark it so that we know that it was already
6654 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6656 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6657 promote_mode (type
, mode
, &unsignedp
, 0);
6658 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6659 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6662 if (temp
== const0_rtx
)
6663 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6665 store_expr (TREE_OPERAND (exp
, 0), temp
,
6666 modifier
== EXPAND_STACK_PARM
? 2 : 0);
6668 TREE_USED (exp
) = 1;
6671 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6672 must be a promoted value. We return a SUBREG of the wanted mode,
6673 but mark it so that we know that it was already extended. */
6675 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6676 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6678 /* Compute the signedness and make the proper SUBREG. */
6679 promote_mode (type
, mode
, &unsignedp
, 0);
6680 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6681 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6682 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6686 return SAVE_EXPR_RTL (exp
);
6691 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6692 TREE_OPERAND (exp
, 0)
6693 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
6697 case PLACEHOLDER_EXPR
:
6699 tree old_list
= placeholder_list
;
6700 tree placeholder_expr
= 0;
6702 exp
= find_placeholder (exp
, &placeholder_expr
);
6706 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6707 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
6708 placeholder_list
= old_list
;
6712 case WITH_RECORD_EXPR
:
6713 /* Put the object on the placeholder list, expand our first operand,
6714 and pop the list. */
6715 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6717 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
6719 placeholder_list
= TREE_CHAIN (placeholder_list
);
6723 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6724 expand_goto (TREE_OPERAND (exp
, 0));
6726 expand_computed_goto (TREE_OPERAND (exp
, 0));
6730 expand_exit_loop_if_false (NULL
,
6731 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6734 case LABELED_BLOCK_EXPR
:
6735 if (LABELED_BLOCK_BODY (exp
))
6736 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6737 /* Should perhaps use expand_label, but this is simpler and safer. */
6738 do_pending_stack_adjust ();
6739 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6742 case EXIT_BLOCK_EXPR
:
6743 if (EXIT_BLOCK_RETURN (exp
))
6744 sorry ("returned value in block_exit_expr");
6745 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6750 expand_start_loop (1);
6751 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
6759 tree vars
= TREE_OPERAND (exp
, 0);
6761 /* Need to open a binding contour here because
6762 if there are any cleanups they must be contained here. */
6763 expand_start_bindings (2);
6765 /* Mark the corresponding BLOCK for output in its proper place. */
6766 if (TREE_OPERAND (exp
, 2) != 0
6767 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6768 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
6770 /* If VARS have not yet been expanded, expand them now. */
6773 if (!DECL_RTL_SET_P (vars
))
6775 expand_decl_init (vars
);
6776 vars
= TREE_CHAIN (vars
);
6779 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
6781 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6787 if (RTL_EXPR_SEQUENCE (exp
))
6789 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6791 emit_insn (RTL_EXPR_SEQUENCE (exp
));
6792 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6794 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6795 free_temps_for_rtl_expr (exp
);
6796 return RTL_EXPR_RTL (exp
);
6799 /* If we don't need the result, just ensure we evaluate any
6805 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6806 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6811 /* All elts simple constants => refer to a constant in memory. But
6812 if this is a non-BLKmode mode, let it store a field at a time
6813 since that should make a CONST_INT or CONST_DOUBLE when we
6814 fold. Likewise, if we have a target we can use, it is best to
6815 store directly into the target unless the type is large enough
6816 that memcpy will be used. If we are making an initializer and
6817 all operands are constant, put it in memory as well.
6819 FIXME: Avoid trying to fill vector constructors piece-meal.
6820 Output them with output_constant_def below unless we're sure
6821 they're zeros. This should go away when vector initializers
6822 are treated like VECTOR_CST instead of arrays.
6824 else if ((TREE_STATIC (exp
)
6825 && ((mode
== BLKmode
6826 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6827 || TREE_ADDRESSABLE (exp
)
6828 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6829 && (! MOVE_BY_PIECES_P
6830 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6832 && ((TREE_CODE (type
) == VECTOR_TYPE
6833 && !is_zeros_p (exp
))
6834 || ! mostly_zeros_p (exp
)))))
6835 || ((modifier
== EXPAND_INITIALIZER
6836 || modifier
== EXPAND_CONST_ADDRESS
)
6837 && TREE_CONSTANT (exp
)))
6839 rtx constructor
= output_constant_def (exp
, 1);
6841 if (modifier
!= EXPAND_CONST_ADDRESS
6842 && modifier
!= EXPAND_INITIALIZER
6843 && modifier
!= EXPAND_SUM
)
6844 constructor
= validize_mem (constructor
);
6850 /* Handle calls that pass values in multiple non-contiguous
6851 locations. The Irix 6 ABI has examples of this. */
6852 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6853 || GET_CODE (target
) == PARALLEL
6854 || modifier
== EXPAND_STACK_PARM
)
6856 = assign_temp (build_qualified_type (type
,
6858 | (TREE_READONLY (exp
)
6859 * TYPE_QUAL_CONST
))),
6860 0, TREE_ADDRESSABLE (exp
), 1);
6862 store_constructor (exp
, target
, 0, int_expr_size (exp
));
6868 tree exp1
= TREE_OPERAND (exp
, 0);
6870 tree string
= string_constant (exp1
, &index
);
6872 /* Try to optimize reads from const strings. */
6874 && TREE_CODE (string
) == STRING_CST
6875 && TREE_CODE (index
) == INTEGER_CST
6876 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6877 && GET_MODE_CLASS (mode
) == MODE_INT
6878 && GET_MODE_SIZE (mode
) == 1
6879 && modifier
!= EXPAND_WRITE
)
6880 return gen_int_mode (TREE_STRING_POINTER (string
)
6881 [TREE_INT_CST_LOW (index
)], mode
);
6883 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6884 op0
= memory_address (mode
, op0
);
6885 temp
= gen_rtx_MEM (mode
, op0
);
6886 set_mem_attributes (temp
, exp
, 0);
6888 /* If we are writing to this object and its type is a record with
6889 readonly fields, we must mark it as readonly so it will
6890 conflict with readonly references to those fields. */
6891 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
6892 RTX_UNCHANGING_P (temp
) = 1;
6898 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6902 tree array
= TREE_OPERAND (exp
, 0);
6903 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6904 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6905 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6908 /* Optimize the special-case of a zero lower bound.
6910 We convert the low_bound to sizetype to avoid some problems
6911 with constant folding. (E.g. suppose the lower bound is 1,
6912 and its mode is QI. Without the conversion, (ARRAY
6913 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6914 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6916 if (! integer_zerop (low_bound
))
6917 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6919 /* Fold an expression like: "foo"[2].
6920 This is not done in fold so it won't happen inside &.
6921 Don't fold if this is for wide characters since it's too
6922 difficult to do correctly and this is a very rare case. */
6924 if (modifier
!= EXPAND_CONST_ADDRESS
6925 && modifier
!= EXPAND_INITIALIZER
6926 && modifier
!= EXPAND_MEMORY
6927 && TREE_CODE (array
) == STRING_CST
6928 && TREE_CODE (index
) == INTEGER_CST
6929 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6930 && GET_MODE_CLASS (mode
) == MODE_INT
6931 && GET_MODE_SIZE (mode
) == 1)
6932 return gen_int_mode (TREE_STRING_POINTER (array
)
6933 [TREE_INT_CST_LOW (index
)], mode
);
6935 /* If this is a constant index into a constant array,
6936 just get the value from the array. Handle both the cases when
6937 we have an explicit constructor and when our operand is a variable
6938 that was declared const. */
6940 if (modifier
!= EXPAND_CONST_ADDRESS
6941 && modifier
!= EXPAND_INITIALIZER
6942 && modifier
!= EXPAND_MEMORY
6943 && TREE_CODE (array
) == CONSTRUCTOR
6944 && ! TREE_SIDE_EFFECTS (array
)
6945 && TREE_CODE (index
) == INTEGER_CST
6946 && 0 > compare_tree_int (index
,
6947 list_length (CONSTRUCTOR_ELTS
6948 (TREE_OPERAND (exp
, 0)))))
6952 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6953 i
= TREE_INT_CST_LOW (index
);
6954 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6958 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
6962 else if (optimize
>= 1
6963 && modifier
!= EXPAND_CONST_ADDRESS
6964 && modifier
!= EXPAND_INITIALIZER
6965 && modifier
!= EXPAND_MEMORY
6966 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6967 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6968 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
6969 && targetm
.binds_local_p (array
))
6971 if (TREE_CODE (index
) == INTEGER_CST
)
6973 tree init
= DECL_INITIAL (array
);
6975 if (TREE_CODE (init
) == CONSTRUCTOR
)
6979 for (elem
= CONSTRUCTOR_ELTS (init
);
6981 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6982 elem
= TREE_CHAIN (elem
))
6985 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6986 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6989 else if (TREE_CODE (init
) == STRING_CST
6990 && 0 > compare_tree_int (index
,
6991 TREE_STRING_LENGTH (init
)))
6993 tree type
= TREE_TYPE (TREE_TYPE (init
));
6994 enum machine_mode mode
= TYPE_MODE (type
);
6996 if (GET_MODE_CLASS (mode
) == MODE_INT
6997 && GET_MODE_SIZE (mode
) == 1)
6998 return gen_int_mode (TREE_STRING_POINTER (init
)
6999 [TREE_INT_CST_LOW (index
)], mode
);
7004 goto normal_inner_ref
;
7007 /* If the operand is a CONSTRUCTOR, we can just extract the
7008 appropriate field if it is present. */
7009 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
7013 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
7014 elt
= TREE_CHAIN (elt
))
7015 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
7016 /* We can normally use the value of the field in the
7017 CONSTRUCTOR. However, if this is a bitfield in
7018 an integral mode that we can fit in a HOST_WIDE_INT,
7019 we must mask only the number of bits in the bitfield,
7020 since this is done implicitly by the constructor. If
7021 the bitfield does not meet either of those conditions,
7022 we can't do this optimization. */
7023 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7024 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
7026 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
7027 <= HOST_BITS_PER_WIDE_INT
))))
7029 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7030 && modifier
== EXPAND_STACK_PARM
)
7032 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
7033 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
7035 HOST_WIDE_INT bitsize
7036 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
7037 enum machine_mode imode
7038 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
7040 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
7042 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7043 op0
= expand_and (imode
, op0
, op1
, target
);
7048 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
7051 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7053 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7061 goto normal_inner_ref
;
7064 case ARRAY_RANGE_REF
:
7067 enum machine_mode mode1
;
7068 HOST_WIDE_INT bitsize
, bitpos
;
7071 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7072 &mode1
, &unsignedp
, &volatilep
);
7075 /* If we got back the original object, something is wrong. Perhaps
7076 we are evaluating an expression too early. In any event, don't
7077 infinitely recurse. */
7081 /* If TEM's type is a union of variable size, pass TARGET to the inner
7082 computation, since it will need a temporary and TARGET is known
7083 to have to do. This occurs in unchecked conversion in Ada. */
7087 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7088 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7090 && modifier
!= EXPAND_STACK_PARM
7091 ? target
: NULL_RTX
),
7093 (modifier
== EXPAND_INITIALIZER
7094 || modifier
== EXPAND_CONST_ADDRESS
7095 || modifier
== EXPAND_STACK_PARM
)
7096 ? modifier
: EXPAND_NORMAL
);
7098 /* If this is a constant, put it into a register if it is a
7099 legitimate constant and OFFSET is 0 and memory if it isn't. */
7100 if (CONSTANT_P (op0
))
7102 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7103 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7105 op0
= force_reg (mode
, op0
);
7107 op0
= validize_mem (force_const_mem (mode
, op0
));
7110 /* Otherwise, if this object not in memory and we either have an
7111 offset or a BLKmode result, put it there. This case can't occur in
7112 C, but can in Ada if we have unchecked conversion of an expression
7113 from a scalar type to an array or record type or for an
7114 ARRAY_RANGE_REF whose type is BLKmode. */
7115 else if (GET_CODE (op0
) != MEM
7117 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
7119 /* If the operand is a SAVE_EXPR, we can deal with this by
7120 forcing the SAVE_EXPR into memory. */
7121 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7123 put_var_into_stack (TREE_OPERAND (exp
, 0),
7125 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7130 = build_qualified_type (TREE_TYPE (tem
),
7131 (TYPE_QUALS (TREE_TYPE (tem
))
7132 | TYPE_QUAL_CONST
));
7133 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7135 emit_move_insn (memloc
, op0
);
7142 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7145 if (GET_CODE (op0
) != MEM
)
7148 #ifdef POINTERS_EXTEND_UNSIGNED
7149 if (GET_MODE (offset_rtx
) != Pmode
)
7150 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7152 if (GET_MODE (offset_rtx
) != ptr_mode
)
7153 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7156 /* A constant address in OP0 can have VOIDmode, we must not try
7157 to call force_reg for that case. Avoid that case. */
7158 if (GET_CODE (op0
) == MEM
7159 && GET_MODE (op0
) == BLKmode
7160 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7162 && (bitpos
% bitsize
) == 0
7163 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7164 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7166 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7170 op0
= offset_address (op0
, offset_rtx
,
7171 highest_pow2_factor (offset
));
7174 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7175 record its alignment as BIGGEST_ALIGNMENT. */
7176 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7177 && is_aligning_offset (offset
, tem
))
7178 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7180 /* Don't forget about volatility even if this is a bitfield. */
7181 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7183 if (op0
== orig_op0
)
7184 op0
= copy_rtx (op0
);
7186 MEM_VOLATILE_P (op0
) = 1;
7189 /* The following code doesn't handle CONCAT.
7190 Assume only bitpos == 0 can be used for CONCAT, due to
7191 one element arrays having the same mode as its element. */
7192 if (GET_CODE (op0
) == CONCAT
)
7194 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
7199 /* In cases where an aligned union has an unaligned object
7200 as a field, we might be extracting a BLKmode value from
7201 an integer-mode (e.g., SImode) object. Handle this case
7202 by doing the extract into an object as wide as the field
7203 (which we know to be the width of a basic mode), then
7204 storing into memory, and changing the mode to BLKmode. */
7205 if (mode1
== VOIDmode
7206 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7207 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7208 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7209 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7210 && modifier
!= EXPAND_CONST_ADDRESS
7211 && modifier
!= EXPAND_INITIALIZER
)
7212 /* If the field isn't aligned enough to fetch as a memref,
7213 fetch it as a bit field. */
7214 || (mode1
!= BLKmode
7215 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7216 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
7217 && ((modifier
== EXPAND_CONST_ADDRESS
7218 || modifier
== EXPAND_INITIALIZER
)
7220 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7221 || (bitpos
% BITS_PER_UNIT
!= 0)))
7222 /* If the type and the field are a constant size and the
7223 size of the type isn't the same size as the bitfield,
7224 we must use bitfield operations. */
7226 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7228 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7231 enum machine_mode ext_mode
= mode
;
7233 if (ext_mode
== BLKmode
7234 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7235 && GET_CODE (target
) == MEM
7236 && bitpos
% BITS_PER_UNIT
== 0))
7237 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7239 if (ext_mode
== BLKmode
)
7242 target
= assign_temp (type
, 0, 1, 1);
7247 /* In this case, BITPOS must start at a byte boundary and
7248 TARGET, if specified, must be a MEM. */
7249 if (GET_CODE (op0
) != MEM
7250 || (target
!= 0 && GET_CODE (target
) != MEM
)
7251 || bitpos
% BITS_PER_UNIT
!= 0)
7254 emit_block_move (target
,
7255 adjust_address (op0
, VOIDmode
,
7256 bitpos
/ BITS_PER_UNIT
),
7257 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7259 (modifier
== EXPAND_STACK_PARM
7260 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7265 op0
= validize_mem (op0
);
7267 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7268 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7270 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7271 (modifier
== EXPAND_STACK_PARM
7272 ? NULL_RTX
: target
),
7274 int_size_in_bytes (TREE_TYPE (tem
)));
7276 /* If the result is a record type and BITSIZE is narrower than
7277 the mode of OP0, an integral mode, and this is a big endian
7278 machine, we must put the field into the high-order bits. */
7279 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7280 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7281 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7282 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7283 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7287 if (mode
== BLKmode
)
7289 rtx
new = assign_temp (build_qualified_type
7290 ((*lang_hooks
.types
.type_for_mode
)
7292 TYPE_QUAL_CONST
), 0, 1, 1);
7294 emit_move_insn (new, op0
);
7295 op0
= copy_rtx (new);
7296 PUT_MODE (op0
, BLKmode
);
7297 set_mem_attributes (op0
, exp
, 1);
7303 /* If the result is BLKmode, use that to access the object
7305 if (mode
== BLKmode
)
7308 /* Get a reference to just this component. */
7309 if (modifier
== EXPAND_CONST_ADDRESS
7310 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7311 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7313 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7315 if (op0
== orig_op0
)
7316 op0
= copy_rtx (op0
);
7318 set_mem_attributes (op0
, exp
, 0);
7319 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7320 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7322 MEM_VOLATILE_P (op0
) |= volatilep
;
7323 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7324 || modifier
== EXPAND_CONST_ADDRESS
7325 || modifier
== EXPAND_INITIALIZER
)
7327 else if (target
== 0)
7328 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7330 convert_move (target
, op0
, unsignedp
);
7336 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7338 /* Evaluate the interior expression. */
7339 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7342 /* Get or create an instruction off which to hang a note. */
7343 if (REG_P (subtarget
))
7346 insn
= get_last_insn ();
7349 if (! INSN_P (insn
))
7350 insn
= prev_nonnote_insn (insn
);
7354 target
= gen_reg_rtx (GET_MODE (subtarget
));
7355 insn
= emit_move_insn (target
, subtarget
);
7358 /* Collect the data for the note. */
7359 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7360 vtbl_ref
= plus_constant (vtbl_ref
,
7361 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7362 /* Discard the initial CONST that was added. */
7363 vtbl_ref
= XEXP (vtbl_ref
, 0);
7366 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7371 /* Intended for a reference to a buffer of a file-object in Pascal.
7372 But it's not certain that a special tree code will really be
7373 necessary for these. INDIRECT_REF might work for them. */
7379 /* Pascal set IN expression.
7382 rlo = set_low - (set_low%bits_per_word);
7383 the_word = set [ (index - rlo)/bits_per_word ];
7384 bit_index = index % bits_per_word;
7385 bitmask = 1 << bit_index;
7386 return !!(the_word & bitmask); */
7388 tree set
= TREE_OPERAND (exp
, 0);
7389 tree index
= TREE_OPERAND (exp
, 1);
7390 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7391 tree set_type
= TREE_TYPE (set
);
7392 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7393 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7394 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7395 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7396 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7397 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7398 rtx setaddr
= XEXP (setval
, 0);
7399 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7401 rtx diff
, quo
, rem
, addr
, bit
, result
;
7403 /* If domain is empty, answer is no. Likewise if index is constant
7404 and out of bounds. */
7405 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7406 && TREE_CODE (set_low_bound
) == INTEGER_CST
7407 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7408 || (TREE_CODE (index
) == INTEGER_CST
7409 && TREE_CODE (set_low_bound
) == INTEGER_CST
7410 && tree_int_cst_lt (index
, set_low_bound
))
7411 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7412 && TREE_CODE (index
) == INTEGER_CST
7413 && tree_int_cst_lt (set_high_bound
, index
))))
7417 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7419 /* If we get here, we have to generate the code for both cases
7420 (in range and out of range). */
7422 op0
= gen_label_rtx ();
7423 op1
= gen_label_rtx ();
7425 if (! (GET_CODE (index_val
) == CONST_INT
7426 && GET_CODE (lo_r
) == CONST_INT
))
7427 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7428 GET_MODE (index_val
), iunsignedp
, op1
);
7430 if (! (GET_CODE (index_val
) == CONST_INT
7431 && GET_CODE (hi_r
) == CONST_INT
))
7432 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7433 GET_MODE (index_val
), iunsignedp
, op1
);
7435 /* Calculate the element number of bit zero in the first word
7437 if (GET_CODE (lo_r
) == CONST_INT
)
7438 rlow
= GEN_INT (INTVAL (lo_r
)
7439 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7441 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7442 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7443 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7445 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7446 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7448 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7449 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7450 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7451 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7453 addr
= memory_address (byte_mode
,
7454 expand_binop (index_mode
, add_optab
, diff
,
7455 setaddr
, NULL_RTX
, iunsignedp
,
7458 /* Extract the bit we want to examine. */
7459 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7460 gen_rtx_MEM (byte_mode
, addr
),
7461 make_tree (TREE_TYPE (index
), rem
),
7463 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7464 GET_MODE (target
) == byte_mode
? target
: 0,
7465 1, OPTAB_LIB_WIDEN
);
7467 if (result
!= target
)
7468 convert_move (target
, result
, 1);
7470 /* Output the code to handle the out-of-range case. */
7473 emit_move_insn (target
, const0_rtx
);
7478 case WITH_CLEANUP_EXPR
:
7479 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7481 WITH_CLEANUP_EXPR_RTL (exp
)
7482 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7483 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7484 CLEANUP_EH_ONLY (exp
));
7486 /* That's it for this cleanup. */
7487 TREE_OPERAND (exp
, 1) = 0;
7489 return WITH_CLEANUP_EXPR_RTL (exp
);
7491 case CLEANUP_POINT_EXPR
:
7493 /* Start a new binding layer that will keep track of all cleanup
7494 actions to be performed. */
7495 expand_start_bindings (2);
7497 target_temp_slot_level
= temp_slot_level
;
7499 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7500 /* If we're going to use this value, load it up now. */
7502 op0
= force_not_mem (op0
);
7503 preserve_temp_slots (op0
);
7504 expand_end_bindings (NULL_TREE
, 0, 0);
7509 /* Check for a built-in function. */
7510 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7511 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7513 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7515 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7516 == BUILT_IN_FRONTEND
)
7517 return (*lang_hooks
.expand_expr
) (exp
, original_target
,
7520 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7523 return expand_call (exp
, target
, ignore
);
7525 case NON_LVALUE_EXPR
:
7528 case REFERENCE_EXPR
:
7529 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7532 if (TREE_CODE (type
) == UNION_TYPE
)
7534 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7536 /* If both input and output are BLKmode, this conversion isn't doing
7537 anything except possibly changing memory attribute. */
7538 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7540 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7543 result
= copy_rtx (result
);
7544 set_mem_attributes (result
, exp
, 0);
7549 target
= assign_temp (type
, 0, 1, 1);
7551 if (GET_CODE (target
) == MEM
)
7552 /* Store data into beginning of memory target. */
7553 store_expr (TREE_OPERAND (exp
, 0),
7554 adjust_address (target
, TYPE_MODE (valtype
), 0),
7555 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7557 else if (GET_CODE (target
) == REG
)
7558 /* Store this field into a union of the proper type. */
7559 store_field (target
,
7560 MIN ((int_size_in_bytes (TREE_TYPE
7561 (TREE_OPERAND (exp
, 0)))
7563 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7564 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7565 VOIDmode
, 0, type
, 0);
7569 /* Return the entire union. */
7573 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7575 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7578 /* If the signedness of the conversion differs and OP0 is
7579 a promoted SUBREG, clear that indication since we now
7580 have to do the proper extension. */
7581 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7582 && GET_CODE (op0
) == SUBREG
)
7583 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7588 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7589 if (GET_MODE (op0
) == mode
)
7592 /* If OP0 is a constant, just convert it into the proper mode. */
7593 if (CONSTANT_P (op0
))
7595 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7596 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7598 if (modifier
== EXPAND_INITIALIZER
)
7599 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7600 subreg_lowpart_offset (mode
,
7603 return convert_modes (mode
, inner_mode
, op0
,
7604 TREE_UNSIGNED (inner_type
));
7607 if (modifier
== EXPAND_INITIALIZER
)
7608 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7612 convert_to_mode (mode
, op0
,
7613 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7615 convert_move (target
, op0
,
7616 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7619 case VIEW_CONVERT_EXPR
:
7620 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7622 /* If the input and output modes are both the same, we are done.
7623 Otherwise, if neither mode is BLKmode and both are integral and within
7624 a word, we can use gen_lowpart. If neither is true, make sure the
7625 operand is in memory and convert the MEM to the new mode. */
7626 if (TYPE_MODE (type
) == GET_MODE (op0
))
7628 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7629 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7630 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7631 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7632 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7633 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7634 else if (GET_CODE (op0
) != MEM
)
7636 /* If the operand is not a MEM, force it into memory. Since we
7637 are going to be be changing the mode of the MEM, don't call
7638 force_const_mem for constants because we don't allow pool
7639 constants to change mode. */
7640 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7642 if (TREE_ADDRESSABLE (exp
))
7645 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7647 = assign_stack_temp_for_type
7648 (TYPE_MODE (inner_type
),
7649 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7651 emit_move_insn (target
, op0
);
7655 /* At this point, OP0 is in the correct mode. If the output type is such
7656 that the operand is known to be aligned, indicate that it is.
7657 Otherwise, we need only be concerned about alignment for non-BLKmode
7659 if (GET_CODE (op0
) == MEM
)
7661 op0
= copy_rtx (op0
);
7663 if (TYPE_ALIGN_OK (type
))
7664 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7665 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7666 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7668 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7669 HOST_WIDE_INT temp_size
7670 = MAX (int_size_in_bytes (inner_type
),
7671 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7672 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7673 temp_size
, 0, type
);
7674 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7676 if (TREE_ADDRESSABLE (exp
))
7679 if (GET_MODE (op0
) == BLKmode
)
7680 emit_block_move (new_with_op0_mode
, op0
,
7681 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7682 (modifier
== EXPAND_STACK_PARM
7683 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7685 emit_move_insn (new_with_op0_mode
, op0
);
7690 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7696 this_optab
= ! unsignedp
&& flag_trapv
7697 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7698 ? addv_optab
: add_optab
;
7700 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7701 something else, make sure we add the register to the constant and
7702 then to the other thing. This case can occur during strength
7703 reduction and doing it this way will produce better code if the
7704 frame pointer or argument pointer is eliminated.
7706 fold-const.c will ensure that the constant is always in the inner
7707 PLUS_EXPR, so the only case we need to do anything about is if
7708 sp, ap, or fp is our second argument, in which case we must swap
7709 the innermost first argument and our second argument. */
7711 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7712 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7713 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7714 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7715 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7716 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7718 tree t
= TREE_OPERAND (exp
, 1);
7720 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7721 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7724 /* If the result is to be ptr_mode and we are adding an integer to
7725 something, we might be forming a constant. So try to use
7726 plus_constant. If it produces a sum and we can't accept it,
7727 use force_operand. This allows P = &ARR[const] to generate
7728 efficient code on machines where a SYMBOL_REF is not a valid
7731 If this is an EXPAND_SUM call, always return the sum. */
7732 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7733 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7735 if (modifier
== EXPAND_STACK_PARM
)
7737 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7738 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7739 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7743 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7745 /* Use immed_double_const to ensure that the constant is
7746 truncated according to the mode of OP1, then sign extended
7747 to a HOST_WIDE_INT. Using the constant directly can result
7748 in non-canonical RTL in a 64x32 cross compile. */
7750 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7752 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7753 op1
= plus_constant (op1
, INTVAL (constant_part
));
7754 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7755 op1
= force_operand (op1
, target
);
7759 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7760 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7761 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7765 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7766 (modifier
== EXPAND_INITIALIZER
7767 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7768 if (! CONSTANT_P (op0
))
7770 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7771 VOIDmode
, modifier
);
7772 /* Return a PLUS if modifier says it's OK. */
7773 if (modifier
== EXPAND_SUM
7774 || modifier
== EXPAND_INITIALIZER
)
7775 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
          /* Use immed_double_const to ensure that the constant is
             truncated according to the mode of OP1, then sign extended
             to a HOST_WIDE_INT.  Using the constant directly can result
             in non-canonical RTL in a 64x32 cross compile.  */
7783 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7785 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7786 op0
= plus_constant (op0
, INTVAL (constant_part
));
7787 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7788 op0
= force_operand (op0
, target
);
      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
7797 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7798 || mode
!= ptr_mode
)
7800 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7801 subtarget
, &op0
, &op1
, 0);
7802 if (op0
== const0_rtx
)
7804 if (op1
== const0_rtx
)
7809 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7810 subtarget
, &op0
, &op1
, modifier
);
7811 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
7819 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7820 && really_constant_p (TREE_OPERAND (exp
, 0))
7821 && really_constant_p (TREE_OPERAND (exp
, 1)))
7823 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7824 NULL_RTX
, &op0
, &op1
, modifier
);
          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
7828 if (GET_CODE (op1
) == CONST_INT
)
7829 return plus_constant (op0
, - INTVAL (op1
));
7831 return gen_rtx_MINUS (mode
, op0
, op1
);
7834 this_optab
= ! unsignedp
&& flag_trapv
7835 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7836 ? subv_optab
: sub_optab
;
      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
7842 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7843 || mode
!= ptr_mode
)
7846 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7847 subtarget
, &op0
, &op1
, modifier
);
      /* Convert A - const to A + (-const).  */
7850 if (GET_CODE (op1
) == CONST_INT
)
7852 op1
= negate_rtx (mode
, op1
);
7853 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
7862 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7864 tree t1
= TREE_OPERAND (exp
, 0);
7865 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7866 TREE_OPERAND (exp
, 1) = t1
;
      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */
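      /* E.g. an index expression i * 4 comes back as
         (mult (reg i') (const_int 4)), which the caller can combine with a
         base register into an indexed address such as
         (plus (reg base) (mult (reg i') (const_int 4))).
         (Illustrative registers and scale factor.)  */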
7872 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7873 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7875 tree exp1
= TREE_OPERAND (exp
, 1);
7877 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7880 if (GET_CODE (op0
) != REG
)
7881 op0
= force_operand (op0
, NULL_RTX
);
7882 if (GET_CODE (op0
) != REG
)
7883 op0
= copy_to_mode_reg (mode
, op0
);
7885 return gen_rtx_MULT (mode
, op0
,
7886 gen_int_mode (tree_low_cst (exp1
, 0),
7887 TYPE_MODE (TREE_TYPE (exp1
))));
7890 if (modifier
== EXPAND_STACK_PARM
)
      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
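      /* For instance, on a machine providing a mulhisi3 pattern,
         (int) ha * (int) hb with HImode operands ha and hb can use the
         16x16->32 widening multiply directly, instead of first extending
         both operands to SImode.  (Illustrative modes; the pattern actually
         used is whatever the optab lookup below finds.)  */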
7897 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7898 && TREE_CODE (type
) == INTEGER_TYPE
7899 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7900 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7901 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7902 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7903 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7904 /* Don't use a widening multiply if a shift will do. */
7905 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7906 > HOST_BITS_PER_WIDE_INT
)
7907 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7909 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7910 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7912 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
              /* If both operands are extended, they must either both
                 be zero-extended or both be sign-extended.  */
7915 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7917 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7919 enum machine_mode innermode
7920 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7921 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7922 ? smul_widen_optab
: umul_widen_optab
);
7923 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7924 ? umul_widen_optab
: smul_widen_optab
);
7925 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7927 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7929 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7930 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7931 TREE_OPERAND (exp
, 1),
7932 NULL_RTX
, &op0
, &op1
, 0);
7934 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7935 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7936 NULL_RTX
, &op0
, &op1
, 0);
7939 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7940 && innermode
== word_mode
)
7943 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7944 NULL_RTX
, VOIDmode
, 0);
7945 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7946 op1
= convert_modes (innermode
, mode
,
7947 expand_expr (TREE_OPERAND (exp
, 1),
7948 NULL_RTX
, VOIDmode
, 0),
7951 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7952 NULL_RTX
, VOIDmode
, 0);
7953 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7954 unsignedp
, OPTAB_LIB_WIDEN
);
7955 htem
= expand_mult_highpart_adjust (innermode
,
7956 gen_highpart (innermode
, temp
),
7958 gen_highpart (innermode
, temp
),
7960 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7965 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7966 subtarget
, &op0
, &op1
, 0);
7967 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7969 case TRUNC_DIV_EXPR
:
7970 case FLOOR_DIV_EXPR
:
7972 case ROUND_DIV_EXPR
:
7973 case EXACT_DIV_EXPR
:
7974 if (modifier
== EXPAND_STACK_PARM
)
      /* Possible optimization: compute the dividend with EXPAND_SUM;
         then, if the divisor is constant, we can optimize the case
         where some terms of the dividend have coefficients divisible by it.  */
7979 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7980 subtarget
, &op0
, &op1
, 0);
7981 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
         saving an expensive divide.  If not, combine will rebuild the
         original computation.  */
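      /* For instance, under -funsafe-math-optimizations the two divides in
         x/d + y/d become x*(1/d) + y*(1/d), and CSE can then share a single
         reciprocal of d.  (Illustrative expression.)  */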
7987 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
7988 && TREE_CODE (type
) == REAL_TYPE
7989 && !real_onep (TREE_OPERAND (exp
, 0)))
7990 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
7991 build (RDIV_EXPR
, type
,
7992 build_real (type
, dconst1
),
7993 TREE_OPERAND (exp
, 1))),
7994 target
, tmode
, modifier
);
7995 this_optab
= sdiv_optab
;
7998 case TRUNC_MOD_EXPR
:
7999 case FLOOR_MOD_EXPR
:
8001 case ROUND_MOD_EXPR
:
8002 if (modifier
== EXPAND_STACK_PARM
)
8004 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8005 subtarget
, &op0
, &op1
, 0);
8006 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
8008 case FIX_ROUND_EXPR
:
8009 case FIX_FLOOR_EXPR
:
8011 abort (); /* Not used for C. */
8013 case FIX_TRUNC_EXPR
:
8014 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8015 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8016 target
= gen_reg_rtx (mode
);
8017 expand_fix (target
, op0
, unsignedp
);
8021 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8022 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8023 target
= gen_reg_rtx (mode
);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
8026 if (GET_MODE (op0
) == VOIDmode
)
8027 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8029 expand_float (target
, op0
,
8030 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
8034 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8035 if (modifier
== EXPAND_STACK_PARM
)
8037 temp
= expand_unop (mode
,
8038 ! unsignedp
&& flag_trapv
8039 && (GET_MODE_CLASS(mode
) == MODE_INT
)
8040 ? negv_optab
: neg_optab
, op0
, target
, 0);
8046 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8047 if (modifier
== EXPAND_STACK_PARM
)
8050 /* ABS_EXPR is not valid for complex arguments. */
8051 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
8052 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
8057 if (TREE_UNSIGNED (type
))
8060 return expand_abs (mode
, op0
, target
, unsignedp
,
8061 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
8065 target
= original_target
;
8067 || modifier
== EXPAND_STACK_PARM
8068 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8069 || GET_MODE (target
) != mode
8070 || (GET_CODE (target
) == REG
8071 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8072 target
= gen_reg_rtx (mode
);
8073 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8074 target
, &op0
, &op1
, 0);
      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
8079 this_optab
= (TREE_UNSIGNED (type
)
8080 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8081 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8083 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
      /* At this point, a MEM target is no longer useful; we will get better
         code elsewhere.  */
8091 if (GET_CODE (target
) == MEM
)
8092 target
= gen_reg_rtx (mode
);
8094 /* If op1 was placed in target, swap op0 and op1. */
8095 if (target
!= op0
&& target
== op1
)
8103 emit_move_insn (target
, op0
);
8105 op0
= gen_label_rtx ();
      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
8109 if (GET_MODE_CLASS (mode
) == MODE_INT
8110 && ! can_compare_p (GE
, mode
, ccp_jump
))
8112 if (code
== MAX_EXPR
)
8113 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8114 target
, op1
, NULL_RTX
, op0
);
8116 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8117 op1
, target
, NULL_RTX
, op0
);
8121 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
8122 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8123 unsignedp
, mode
, NULL_RTX
, NULL_RTX
,
8126 emit_move_insn (target
, op1
);
8131 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8132 if (modifier
== EXPAND_STACK_PARM
)
8134 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */
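      /* For example, a && f(b) must short-circuit (f is called only when a
         is nonzero) and so is a TRUTH_ANDIF_EXPR, while a & b on two
         zero-or-one values may evaluate both operands and simply AND the
         bits.  (Illustrative source fragments.)  */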
8152 case TRUTH_AND_EXPR
:
8154 this_optab
= and_optab
;
8159 this_optab
= ior_optab
;
8162 case TRUTH_XOR_EXPR
:
8164 this_optab
= xor_optab
;
8171 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8173 if (modifier
== EXPAND_STACK_PARM
)
8175 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8176 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
8187 case UNORDERED_EXPR
:
8194 temp
= do_store_flag (exp
,
8195 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8196 tmode
!= VOIDmode
? tmode
: mode
, 0);
8200 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8201 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8203 && GET_CODE (original_target
) == REG
8204 && (GET_MODE (original_target
)
8205 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8207 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8210 /* If temp is constant, we can just compute the result. */
8211 if (GET_CODE (temp
) == CONST_INT
)
8213 if (INTVAL (temp
) != 0)
8214 emit_move_insn (target
, const1_rtx
);
8216 emit_move_insn (target
, const0_rtx
);
8221 if (temp
!= original_target
)
8223 enum machine_mode mode1
= GET_MODE (temp
);
8224 if (mode1
== VOIDmode
)
8225 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
8227 temp
= copy_to_mode_reg (mode1
, temp
);
8230 op1
= gen_label_rtx ();
8231 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8232 GET_MODE (temp
), unsignedp
, op1
);
8233 emit_move_insn (temp
, const1_rtx
);
      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */
8242 case TRUTH_ANDIF_EXPR
:
8243 case TRUTH_ORIF_EXPR
:
8246 || modifier
== EXPAND_STACK_PARM
8247 || ! safe_from_p (target
, exp
, 1)
8248 /* Make sure we don't have a hard reg (such as function's return
8249 value) live across basic blocks, if not optimizing. */
8250 || (!optimize
&& GET_CODE (target
) == REG
8251 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8252 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8255 emit_clr_insn (target
);
8257 op1
= gen_label_rtx ();
8258 jumpifnot (exp
, op1
);
8261 emit_0_to_1_insn (target
);
8264 return ignore
? const0_rtx
: target
;
8266 case TRUTH_NOT_EXPR
:
8267 if (modifier
== EXPAND_STACK_PARM
)
8269 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8270 /* The parser is careful to generate TRUTH_NOT_EXPR
8271 only with operands that are always zero or one. */
8272 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8273 target
, 1, OPTAB_LIB_WIDEN
);
8279 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8281 return expand_expr (TREE_OPERAND (exp
, 1),
8282 (ignore
? const0_rtx
: target
),
8283 VOIDmode
, modifier
);
      /* If we would have a "singleton" (see below) were it not for a
         conversion in each arm, bring that conversion back out.  */
8288 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8289 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8290 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8291 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8293 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8294 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8296 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8297 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8298 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8299 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8300 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8301 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8302 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8303 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8304 return expand_expr (build1 (NOP_EXPR
, type
,
8305 build (COND_EXPR
, TREE_TYPE (iftrue
),
8306 TREE_OPERAND (exp
, 0),
8308 target
, tmode
, modifier
);
      /* Note that COND_EXPRs whose type is a structure or union
         are required to be constructed to contain assignments of
         a temporary variable, so that we can evaluate them here
         for side effect only.  If type is void, we must do likewise.  */

      /* If an arm of the branch requires a cleanup,
         only that cleanup is performed.  */
8321 tree binary_op
= 0, unary_op
= 0;
        /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
           convert it to our mode, if necessary.  */
8325 if (integer_onep (TREE_OPERAND (exp
, 1))
8326 && integer_zerop (TREE_OPERAND (exp
, 2))
8327 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8331 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8336 if (modifier
== EXPAND_STACK_PARM
)
8338 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8339 if (GET_MODE (op0
) == mode
)
8343 target
= gen_reg_rtx (mode
);
8344 convert_move (target
, op0
, unsignedp
);
        /* Check for X ? A + B : A.  If we have this, we can copy A to the
           output and conditionally add B.  Similarly for unary operations.
           Don't do this if X has side-effects because those side effects
           might affect A or B and the "?" operation is a sequence point in
           ANSI.  (operand_equal_p tests for side effects.)  */
8354 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8355 && operand_equal_p (TREE_OPERAND (exp
, 2),
8356 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8357 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8358 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8359 && operand_equal_p (TREE_OPERAND (exp
, 1),
8360 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8361 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8362 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8363 && operand_equal_p (TREE_OPERAND (exp
, 2),
8364 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8365 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8366 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8367 && operand_equal_p (TREE_OPERAND (exp
, 1),
8368 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8369 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */
8378 else if (modifier
== EXPAND_STACK_PARM
)
8379 temp
= assign_temp (type
, 0, 0, 1);
8380 else if (original_target
8381 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8382 || (singleton
&& GET_CODE (original_target
) == REG
8383 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8384 && original_target
== var_rtx (singleton
)))
8385 && GET_MODE (original_target
) == mode
8386 #ifdef HAVE_conditional_move
8387 && (! can_conditionally_move_p (mode
)
8388 || GET_CODE (original_target
) == REG
8389 || TREE_ADDRESSABLE (type
))
8391 && (GET_CODE (original_target
) != MEM
8392 || TREE_ADDRESSABLE (type
)))
8393 temp
= original_target
;
8394 else if (TREE_ADDRESSABLE (type
))
8397 temp
= assign_temp (type
, 0, 0, 1);
        /* If we had X ? A + C : A, with C a constant power of 2, and we can
           do the test of X as a store-flag operation, do this as
           A + ((X != 0) << log C).  Similarly for other simple binary
           operators.  Only do for C == 1 if BRANCH_COST is low.  */
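        /* E.g. X ? A + 4 : A can be computed as A + ((X != 0) << 2): one
           store-flag and one shift-add, with no branch at all.
           (Illustrative constant.)  */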
8403 if (temp
&& singleton
&& binary_op
8404 && (TREE_CODE (binary_op
) == PLUS_EXPR
8405 || TREE_CODE (binary_op
) == MINUS_EXPR
8406 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8407 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8408 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8409 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8410 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8414 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8415 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8416 ? addv_optab
: add_optab
)
8417 : TREE_CODE (binary_op
) == MINUS_EXPR
8418 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8419 ? subv_optab
: sub_optab
)
8420 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
            /* If we had X ? A : A + 1, do this as A + (X == 0).  */
8424 if (singleton
== TREE_OPERAND (exp
, 1))
8425 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8427 cond
= TREE_OPERAND (exp
, 0);
8429 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8431 mode
, BRANCH_COST
<= 1);
8433 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8434 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8435 build_int_2 (tree_log2
8439 (safe_from_p (temp
, singleton
, 1)
8440 ? temp
: NULL_RTX
), 0);
8444 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8445 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8446 unsignedp
, OPTAB_LIB_WIDEN
);
8450 do_pending_stack_adjust ();
8452 op0
= gen_label_rtx ();
8454 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
            /* If the target conflicts with the other operand of the
               binary op, we can't use it.  Also, we can't use the target
               if it is a hard register, because evaluating the condition
               might clobber it.  */
8463 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8464 || (GET_CODE (temp
) == REG
8465 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8466 temp
= gen_reg_rtx (mode
);
8467 store_expr (singleton
, temp
,
8468 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8471 expand_expr (singleton
,
8472 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8473 if (singleton
== TREE_OPERAND (exp
, 1))
8474 jumpif (TREE_OPERAND (exp
, 0), op0
);
8476 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8478 start_cleanup_deferral ();
8479 if (binary_op
&& temp
== 0)
8480 /* Just touch the other operand. */
8481 expand_expr (TREE_OPERAND (binary_op
, 1),
8482 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8484 store_expr (build (TREE_CODE (binary_op
), type
,
8485 make_tree (type
, temp
),
8486 TREE_OPERAND (binary_op
, 1)),
8487 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8489 store_expr (build1 (TREE_CODE (unary_op
), type
,
8490 make_tree (type
, temp
)),
8491 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
        /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
           comparison operator.  If we have one of these cases, set the
           output to A, branch on A (cse will merge these two references),
           then set the output to FOO.  */
8499 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8500 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8501 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8502 TREE_OPERAND (exp
, 1), 0)
8503 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8504 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8505 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8507 if (GET_CODE (temp
) == REG
8508 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8509 temp
= gen_reg_rtx (mode
);
8510 store_expr (TREE_OPERAND (exp
, 1), temp
,
8511 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8512 jumpif (TREE_OPERAND (exp
, 0), op0
);
8514 start_cleanup_deferral ();
8515 if (TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8516 store_expr (TREE_OPERAND (exp
, 2), temp
,
8517 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8519 expand_expr (TREE_OPERAND (exp
, 2),
8520 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8524 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8525 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8526 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8527 TREE_OPERAND (exp
, 2), 0)
8528 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8529 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8530 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8532 if (GET_CODE (temp
) == REG
8533 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8534 temp
= gen_reg_rtx (mode
);
8535 store_expr (TREE_OPERAND (exp
, 2), temp
,
8536 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8537 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8539 start_cleanup_deferral ();
8540 if (TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8541 store_expr (TREE_OPERAND (exp
, 1), temp
,
8542 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8544 expand_expr (TREE_OPERAND (exp
, 1),
8545 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8550 op1
= gen_label_rtx ();
8551 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8553 start_cleanup_deferral ();
            /* One branch of the cond can be void, if it never returns.  For
               example A ? throw : E  */
8558 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8559 store_expr (TREE_OPERAND (exp
, 1), temp
,
8560 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8562 expand_expr (TREE_OPERAND (exp
, 1),
8563 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8564 end_cleanup_deferral ();
8566 emit_jump_insn (gen_jump (op1
));
8569 start_cleanup_deferral ();
8571 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8572 store_expr (TREE_OPERAND (exp
, 2), temp
,
8573 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8575 expand_expr (TREE_OPERAND (exp
, 2),
8576 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8579 end_cleanup_deferral ();
      /* Something needs to be initialized, but we didn't know
         where that thing was when building the tree.  For example,
         it could be the return value of a function, or a parameter
         to a function which lays down in the stack, or a temporary
         variable which must be passed by reference.

         We guarantee that the expression will either be constructed
         or copied into our original target.  */
8599 tree slot
= TREE_OPERAND (exp
, 0);
8600 tree cleanups
= NULL_TREE
;
8603 if (TREE_CODE (slot
) != VAR_DECL
)
8607 target
= original_target
;
8609 /* Set this here so that if we get a target that refers to a
8610 register variable that's already been used, put_reg_into_stack
8611 knows that it should fix up those uses. */
8612 TREE_USED (slot
) = 1;
8616 if (DECL_RTL_SET_P (slot
))
8618 target
= DECL_RTL (slot
);
            /* If we have already expanded the slot, don't do it again.  */
8621 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8626 target
= assign_temp (type
, 2, 0, 1);
8627 /* All temp slots at this level must not conflict. */
8628 preserve_temp_slots (target
);
8629 SET_DECL_RTL (slot
, target
);
8630 if (TREE_ADDRESSABLE (slot
))
8631 put_var_into_stack (slot
, /*rescan=*/false);
          /* Since SLOT is not known to the called function
             to belong to its stack frame, we must build an explicit
             cleanup.  This case occurs when we must build up a reference
             to pass the reference as an argument.  In this case,
             it is very likely that such a reference need not be
             built here.  */
8640 if (TREE_OPERAND (exp
, 2) == 0)
8641 TREE_OPERAND (exp
, 2)
8642 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
8643 cleanups
= TREE_OPERAND (exp
, 2);
          /* This case does occur, when expanding a parameter which
             needs to be constructed on the stack.  The target
             is the actual stack address that we want to initialize.
             The function we call will perform the cleanup in this case.  */

          /* If we have already assigned it space, use that space,
             not the target that we were passed in, as our target
             parameter is only a hint.  */
8656 if (DECL_RTL_SET_P (slot
))
8658 target
= DECL_RTL (slot
);
              /* If we have already expanded the slot, don't do it again.  */
8661 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8666 SET_DECL_RTL (slot
, target
);
8667 /* If we must have an addressable slot, then make sure that
8668 the RTL that we just stored in slot is OK. */
8669 if (TREE_ADDRESSABLE (slot
))
8670 put_var_into_stack (slot
, /*rescan=*/true);
8674 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8675 /* Mark it as expanded. */
8676 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8678 store_expr (exp1
, target
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8680 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8687 tree lhs
= TREE_OPERAND (exp
, 0);
8688 tree rhs
= TREE_OPERAND (exp
, 1);
8690 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a
           call.  If lhs is simple, compute it first so we can give it
           as a target if the rhs is just a call.  This avoids an
           extra temp and copy and that prevents a partial-subsumption
           which makes bad code.  Actually we could treat
           component_ref's of vars like vars.  */
8704 tree lhs
= TREE_OPERAND (exp
, 0);
8705 tree rhs
= TREE_OPERAND (exp
, 1);
        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
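        /* E.g. for s.f |= t.g with 1-bit fields f and g: jump over the
           store when t.g is zero, and otherwise store the constant 1 into
           s.f, instead of extracting, OR-ing and re-inserting the bit.
           (Illustrative field names.)  */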
8718 && TREE_CODE (lhs
) == COMPONENT_REF
8719 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8720 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8721 && TREE_OPERAND (rhs
, 0) == lhs
8722 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8723 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8724 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8726 rtx label
= gen_label_rtx ();
8728 do_jump (TREE_OPERAND (rhs
, 1),
8729 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8730 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8731 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8732 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8734 : integer_zero_node
)),
8736 do_pending_stack_adjust ();
8741 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8747 if (!TREE_OPERAND (exp
, 0))
8748 expand_null_return ();
8750 expand_return (TREE_OPERAND (exp
, 0));
8753 case PREINCREMENT_EXPR
:
8754 case PREDECREMENT_EXPR
:
8755 return expand_increment (exp
, 0, ignore
);
8757 case POSTINCREMENT_EXPR
:
8758 case POSTDECREMENT_EXPR
:
8759 /* Faster to treat as pre-increment if result is not used. */
8760 return expand_increment (exp
, ! ignore
, ignore
);
8763 if (modifier
== EXPAND_STACK_PARM
)
8765 /* Are we taking the address of a nested function? */
8766 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8767 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8768 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8769 && ! TREE_STATIC (exp
))
8771 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8772 op0
= force_operand (op0
, target
);
      /* If we are taking the address of something erroneous, just
         return a zero.  */
8776 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8778 /* If we are taking the address of a constant and are at the
8779 top level, we have to use output_constant_def since we can't
8780 call force_const_mem at top level. */
8782 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8783 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8785 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8788 /* We make sure to pass const0_rtx down if we came in with
8789 ignore set, to avoid doing the cleanups twice for something. */
8790 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8791 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8792 (modifier
== EXPAND_INITIALIZER
8793 ? modifier
: EXPAND_CONST_ADDRESS
));
8795 /* If we are going to ignore the result, OP0 will have been set
8796 to const0_rtx, so just return it. Don't get confused and
8797 think we are taking the address of the constant. */
8801 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8802 clever and returns a REG when given a MEM. */
8803 op0
= protect_from_queue (op0
, 1);
8805 /* We would like the object in memory. If it is a constant, we can
8806 have it be statically allocated into memory. For a non-constant,
8807 we need to allocate some memory and store the value into it. */
8809 if (CONSTANT_P (op0
))
8810 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8812 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8813 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8814 || GET_CODE (op0
) == PARALLEL
|| GET_CODE (op0
) == LO_SUM
)
8816 /* If the operand is a SAVE_EXPR, we can deal with this by
8817 forcing the SAVE_EXPR into memory. */
8818 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
8820 put_var_into_stack (TREE_OPERAND (exp
, 0),
8822 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
8826 /* If this object is in a register, it can't be BLKmode. */
8827 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8828 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8830 if (GET_CODE (op0
) == PARALLEL
)
              /* Handle calls that pass values in multiple
                 non-contiguous locations.  The Irix 6 ABI has examples
                 of this.  */
8834 emit_group_store (memloc
, op0
, inner_type
,
8835 int_size_in_bytes (inner_type
));
8837 emit_move_insn (memloc
, op0
);
8843 if (GET_CODE (op0
) != MEM
)
8846 mark_temp_addr_taken (op0
);
8847 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8849 op0
= XEXP (op0
, 0);
8850 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
8851 op0
= convert_memory_address (ptr_mode
, op0
);
      /* If OP0 is not aligned at least as much as the type requires, we
         need to make a temporary, copy OP0 to it, and take the address of
         the temporary.  We want to use the alignment of the type, not of
         the operand.  Note that this is incorrect for FUNCTION_TYPE, but
         the test for BLKmode means that can't happen.  The test for
         BLKmode is because we never make mis-aligned MEMs with
         non-BLKmode.

         We don't need to do this at all if the machine doesn't have
         strict alignment.  */
8865 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
8866 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
8868 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
8870 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8873 if (TYPE_ALIGN_OK (inner_type
))
8876 if (TREE_ADDRESSABLE (inner_type
))
8878 /* We can't make a bitwise copy of this object, so fail. */
8879 error ("cannot take the address of an unaligned member");
8883 new = assign_stack_temp_for_type
8884 (TYPE_MODE (inner_type
),
8885 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
8886 : int_size_in_bytes (inner_type
),
8887 1, build_qualified_type (inner_type
,
8888 (TYPE_QUALS (inner_type
)
8889 | TYPE_QUAL_CONST
)));
8891 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
8892 (modifier
== EXPAND_STACK_PARM
8893 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
8898 op0
= force_operand (XEXP (op0
, 0), target
);
8902 && GET_CODE (op0
) != REG
8903 && modifier
!= EXPAND_CONST_ADDRESS
8904 && modifier
!= EXPAND_INITIALIZER
8905 && modifier
!= EXPAND_SUM
)
8906 op0
= force_reg (Pmode
, op0
);
8908 if (GET_CODE (op0
) == REG
8909 && ! REG_USERVAR_P (op0
))
8910 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8912 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
8913 op0
= convert_memory_address (ptr_mode
, op0
);
8917 case ENTRY_VALUE_EXPR
:
8920 /* COMPLEX type for Extended Pascal & Fortran */
8923 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8926 /* Get the rtx code of the operands. */
8927 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8928 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8931 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8935 /* Move the real (op0) and imaginary (op1) parts to their location. */
8936 emit_move_insn (gen_realpart (mode
, target
), op0
);
8937 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8939 insns
= get_insns ();
8942 /* Complex construction should appear as a single unit. */
8943 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8944 each with a separate pseudo as destination.
8945 It's not correct for flow to treat them as a unit. */
8946 if (GET_CODE (target
) != CONCAT
)
8947 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8955 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8956 return gen_realpart (mode
, op0
);
8959 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8960 return gen_imagpart (mode
, op0
);
8964 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8968 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8971 target
= gen_reg_rtx (mode
);
8975 /* Store the realpart and the negated imagpart to target. */
8976 emit_move_insn (gen_realpart (partmode
, target
),
8977 gen_realpart (partmode
, op0
));
8979 imag_t
= gen_imagpart (partmode
, target
);
8980 temp
= expand_unop (partmode
,
8981 ! unsignedp
&& flag_trapv
8982 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8983 ? negv_optab
: neg_optab
,
8984 gen_imagpart (partmode
, op0
), imag_t
, 0);
8986 emit_move_insn (imag_t
, temp
);
8988 insns
= get_insns ();
8991 /* Conjugate should appear as a single unit
8992 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8993 each with a separate pseudo as destination.
8994 It's not correct for flow to treat them as a unit. */
8995 if (GET_CODE (target
) != CONCAT
)
8996 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
9003 case TRY_CATCH_EXPR
:
9005 tree handler
= TREE_OPERAND (exp
, 1);
9007 expand_eh_region_start ();
9009 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9011 expand_eh_region_end_cleanup (handler
);
9016 case TRY_FINALLY_EXPR
:
9018 tree try_block
= TREE_OPERAND (exp
, 0);
9019 tree finally_block
= TREE_OPERAND (exp
, 1);
9021 if (!optimize
|| unsafe_for_reeval (finally_block
) > 1)
          /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
             is not sufficient, so we cannot expand the block twice.
             So we play games with GOTO_SUBROUTINE_EXPR to let us
             expand the thing only once.  */
          /* When not optimizing, we go ahead with this form since
             (1) user breakpoints operate more predictably without
             code duplication, and
             (2) we're not running any of the global optimizers
             that would explode in time/space with the highly
             connected CFG created by the indirect branching.  */
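          /* The generated control flow looks roughly like this
             (illustrative sketch only):

                 <try-block insns>
                 return_link = &&return_address;
                 goto finally_label;
               return_address:
                 goto done_label;
               finally_label:
                 <finally-block insns>
                 goto *return_link;
               done_label:                                          */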
9034 rtx finally_label
= gen_label_rtx ();
9035 rtx done_label
= gen_label_rtx ();
9036 rtx return_link
= gen_reg_rtx (Pmode
);
9037 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
9038 (tree
) finally_label
, (tree
) return_link
);
9039 TREE_SIDE_EFFECTS (cleanup
) = 1;
9041 /* Start a new binding layer that will keep track of all cleanup
9042 actions to be performed. */
9043 expand_start_bindings (2);
9044 target_temp_slot_level
= temp_slot_level
;
9046 expand_decl_cleanup (NULL_TREE
, cleanup
);
9047 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9049 preserve_temp_slots (op0
);
9050 expand_end_bindings (NULL_TREE
, 0, 0);
9051 emit_jump (done_label
);
9052 emit_label (finally_label
);
9053 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
9054 emit_indirect_jump (return_link
);
9055 emit_label (done_label
);
9059 expand_start_bindings (2);
9060 target_temp_slot_level
= temp_slot_level
;
9062 expand_decl_cleanup (NULL_TREE
, finally_block
);
9063 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9065 preserve_temp_slots (op0
);
9066 expand_end_bindings (NULL_TREE
, 0, 0);
9072 case GOTO_SUBROUTINE_EXPR
:
9074 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9075 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9076 rtx return_address
= gen_label_rtx ();
9077 emit_move_insn (return_link
,
9078 gen_rtx_LABEL_REF (Pmode
, return_address
));
9080 emit_label (return_address
);
9085 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9088 return get_exception_pointer (cfun
);
      /* Function descriptors are not valid except as
         initialization constants, and should not be expanded.  */
9096 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
9099 /* Here to do an ordinary binary operator, generating an instruction
9100 from the optab already placed in `this_optab'. */
9102 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
9103 subtarget
, &op0
, &op1
, 0);
9105 if (modifier
== EXPAND_STACK_PARM
)
9107 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9108 unsignedp
, OPTAB_LIB_WIDEN
);
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR
         || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
          && (TREE_OPERAND (offset, 0) == exp
              || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
                  && (TREE_TYPE (TREE_OPERAND (offset, 0))
                      == TREE_TYPE (exp)))));
}
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

static tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg1);
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg0);
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
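/* E.g. for b = a++ POST is 1 and the rtx returned holds the original value
   of a, while for b = ++a POST is 0 and the incremented value is returned.
   (Illustrative usage.)  */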
static rtx
expand_increment (tree exp, int post, int ignore)
{
9212 tree incremented
= TREE_OPERAND (exp
, 0);
9213 optab this_optab
= add_optab
;
9215 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9216 int op0_is_copy
= 0;
9217 int single_insn
= 0;
9218 /* 1 means we can't store into OP0 directly,
9219 because it is a subreg narrower than a word,
9220 and we don't dare clobber the rest of the word. */
9223 /* Stabilize any component ref that might need to be
9224 evaluated more than once below. */
9226 || TREE_CODE (incremented
) == BIT_FIELD_REF
9227 || (TREE_CODE (incremented
) == COMPONENT_REF
9228 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9229 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9230 incremented
= stabilize_reference (incremented
);
9231 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9232 ones into save exprs so that they don't accidentally get evaluated
9233 more than once by the code below. */
9234 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9235 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9236 incremented
= save_expr (incremented
);
9238 /* Compute the operands as RTX.
9239 Note whether OP0 is the actual lvalue or a copy of it:
9240 I believe it is a copy iff it is a register or subreg
9241 and insns were generated in computing it. */
9243 temp
= get_last_insn ();
9244 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
9254 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9257 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9261 else if (GET_CODE (op0
) == SUBREG
9262 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9264 /* We cannot increment this SUBREG in place. If we are
9265 post-incrementing, get a copy of the old value. Otherwise,
9266 just mark that we cannot increment in place. */
9268 op0
= copy_to_reg (op0
);
9273 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9274 && temp
!= get_last_insn ());
9275 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9277 /* Decide whether incrementing or decrementing. */
9278 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9279 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9280 this_optab
= sub_optab
;
9282 /* Convert decrement by a constant into a negative increment. */
9283 if (this_optab
== sub_optab
9284 && GET_CODE (op1
) == CONST_INT
)
9286 op1
= GEN_INT (-INTVAL (op1
));
9287 this_optab
= add_optab
;
9290 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9291 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9293 /* For a preincrement, see if we can do this with a single instruction. */
9296 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9297 if (icode
!= (int) CODE_FOR_nothing
9298 /* Make sure that OP0 is valid for operands 0 and 1
9299 of the insn we want to queue. */
9300 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9301 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9302 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
9314 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9316 /* This is the easiest way to increment the value wherever it is.
9317 Problems with multiple evaluation of INCREMENTED are prevented
9318 because either (1) it is a component_ref or preincrement,
9319 in which case it was stabilized above, or (2) it is an array_ref
9320 with constant index in an array in a register, which is
9321 safe to reevaluate. */
9322 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9323 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9324 ? MINUS_EXPR
: PLUS_EXPR
),
9327 TREE_OPERAND (exp
, 1));
9329 while (TREE_CODE (incremented
) == NOP_EXPR
9330 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9332 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9333 incremented
= TREE_OPERAND (incremented
, 0);
9336 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
);
9337 return post
? op0
: temp
;
9342 /* We have a true reference to the value in OP0.
9343 If there is an insn to add or subtract in this mode, queue it.
9344 Queuing the increment insn avoids the register shuffling
9345 that often results if we must increment now and first save
9346 the old value for subsequent use. */
9348 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9349 op0
= stabilize (op0
);
9352 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9353 if (icode
!= (int) CODE_FOR_nothing
9354 /* Make sure that OP0 is valid for operands 0 and 1
9355 of the insn we want to queue. */
9356 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9357 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9359 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9360 op1
= force_reg (mode
, op1
);
9362 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9364 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9366 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9367 ? force_reg (Pmode
, XEXP (op0
, 0))
9368 : copy_to_reg (XEXP (op0
, 0)));
9371 op0
= replace_equiv_address (op0
, addr
);
9372 temp
= force_reg (GET_MODE (op0
), op0
);
9373 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9374 op1
= force_reg (mode
, op1
);
9376 /* The increment queue is LIFO, thus we have to `queue'
9377 the instructions in reverse order. */
9378 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9379 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9384 /* Preincrement, or we can't increment with one simple insn. */
9386 /* Save a copy of the value before inc or dec, to return it later. */
9387 temp
= value
= copy_to_reg (op0
);
9389 /* Arrange to return the incremented value. */
9390 /* Copy the rtx because expand_binop will protect from the queue,
9391 and the results of that would be invalid for us to return
9392 if our caller does emit_queue before using our result. */
9393 temp
= copy_rtx (value
= op0
);
9395 /* Increment however we can. */
9396 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9397 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9399 /* Make sure the value is stored into OP0. */
9401 emit_move_insn (op0
, op1
);
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
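/* The set/jump/set fallback emitted at the end of this function has
   roughly this shape (illustrative, non-inverted case):

       target = 1;
       if (op0 <cond> op1) goto label;
       target = 0;
     label:                                                              */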
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
9430 tree arg0
, arg1
, type
;
9432 enum machine_mode operand_mode
;
9436 enum insn_code icode
;
9437 rtx subtarget
= target
;
9440 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9441 result at the end. We can't simply invert the test since it would
9442 have already been inverted if it were valid. This case occurs for
9443 some floating-point comparisons. */
9445 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
9446 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
9448 arg0
= TREE_OPERAND (exp
, 0);
9449 arg1
= TREE_OPERAND (exp
, 1);
9451 /* Don't crash if the comparison was erroneous. */
9452 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
9455 type
= TREE_TYPE (arg0
);
9456 operand_mode
= TYPE_MODE (type
);
9457 unsignedp
= TREE_UNSIGNED (type
);
9459 /* We won't bother with BLKmode store-flag operations because it would mean
9460 passing a lot of information to emit_store_flag. */
9461 if (operand_mode
== BLKmode
)
9464 /* We won't bother with store-flag operations involving function pointers
9465 when function pointers must be canonicalized before comparisons. */
9466 #ifdef HAVE_canonicalize_funcptr_for_compare
9467 if (HAVE_canonicalize_funcptr_for_compare
9468 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9469 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9471 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9472 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9473 == FUNCTION_TYPE
))))
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
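  /* For example, x < 1 becomes x <= 0 (LE, or LEU if unsigned), and for
     signed x the test x > -1 becomes x >= 0 (GE).  (Illustrative cases.)  */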
9487 switch (TREE_CODE (exp
))
9496 if (integer_onep (arg1
))
9497 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
9499 code
= unsignedp
? LTU
: LT
;
9502 if (! unsignedp
&& integer_all_onesp (arg1
))
9503 arg1
= integer_zero_node
, code
= LT
;
9505 code
= unsignedp
? LEU
: LE
;
9508 if (! unsignedp
&& integer_all_onesp (arg1
))
9509 arg1
= integer_zero_node
, code
= GE
;
9511 code
= unsignedp
? GTU
: GT
;
9514 if (integer_onep (arg1
))
9515 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
9517 code
= unsignedp
? GEU
: GE
;
9520 case UNORDERED_EXPR
:
9546 /* Put a constant second. */
9547 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
9549 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
9550 code
= swap_condition (code
);
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
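  /* E.g. (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0 becomes
     ((x >> 3) & 1) ^ 1.  (Illustrative mask.)  */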
9562 if ((code
== NE
|| code
== EQ
)
9563 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
9564 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
9566 tree type
= (*lang_hooks
.types
.type_for_mode
) (mode
, unsignedp
);
9567 return expand_expr (fold_single_bit_test (code
== NE
? NE_EXPR
: EQ_EXPR
,
9569 target
, VOIDmode
, EXPAND_NORMAL
);
9572 /* Now see if we are likely to be able to do this. Return if not. */
9573 if (! can_compare_p (code
, operand_mode
, ccp_store_flag
))
9576 icode
= setcc_gen_code
[(int) code
];
9577 if (icode
== CODE_FOR_nothing
9578 || (only_cheap
&& insn_data
[(int) icode
].operand
[0].mode
!= mode
))
9580 /* We can only do this if it is one of the special cases that
9581 can be handled without an scc insn. */
9582 if ((code
== LT
&& integer_zerop (arg1
))
9583 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
9585 else if (BRANCH_COST
>= 0
9586 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
9587 && TREE_CODE (type
) != REAL_TYPE
9588 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
9589 != CODE_FOR_nothing
)
9590 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
9591 != CODE_FOR_nothing
)))
9597 if (! get_subtarget (target
)
9598 || GET_MODE (subtarget
) != operand_mode
)
9601 expand_operands (arg0
, arg1
, subtarget
, &op0
, &op1
, 0);
9604 target
= gen_reg_rtx (mode
);
9606 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9607 because, if the emit_store_flag does anything it will succeed and
9608 OP0 and OP1 will not be used subsequently. */
9610 result
= emit_store_flag (target
, code
,
9611 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
9612 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
9613 operand_mode
, unsignedp
, 1);
9618 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
9619 result
, 0, OPTAB_LIB_WIDEN
);
  /* If this failed, we have to do this with set/compare/jump/set code.  */

  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);
  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
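/* The set/compare/jump/set fallback in do_store_flag above emits,
   roughly, this pattern (shown for the non-inverted case; with
   INVERT the two constants are exchanged):

       target = 1;
       if (op0 <cond> op1) goto label;
       target = 0;
     label:

   i.e. TARGET is preloaded with the "true" value and overwritten with
   the "false" value only when the branch falls through.  */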
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
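/* With the defaults above, a switch statement therefore needs at
   least 4 distinct case values before a dispatch table is considered
   when a casesi pattern exists, and at least 5 when the bounds check
   must be emitted separately.  */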
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
        (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);
  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
        (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);
  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
        (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);
  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
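  /* Worked example: for a switch covering case values 5 .. 10, RANGE
     is 5 and INDEX already has 5 subtracted.  An original value of 3
     yields 3 - 5, which wraps to a very large unsigned number, so the
     single GTU comparison against 5 catches values below the minimum
     as well as values above the maximum.  */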
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
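  /* The address computed above is

         table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

     e.g. with 4-byte table entries, case N's target is read from the
     word 4*N bytes past the start of the jump table.  */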
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;
  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
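/* As an illustration, a V4SImode VECTOR_CST whose element list holds
   only the values 1 and 2 is turned into a four-element CONST_VECTOR,
   roughly (const_vector:V4SI [1 2 0 0]) in RTL dump notation, with the
   trailing elements zero-filled by the loop above.  */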
#include "gt-expr.h"