/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
50 #include "tree-iterator.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED  /* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
                             enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
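/* Illustrative sketch of how these predicates are meant to be used (the
   actual logic lives in emit_block_move further down; names here follow
   that function): for a constant SIZE,

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);
     else
       ... try a movstr pattern, then a libcall or an explicit loop ...

   so a target tunes MOVE_RATIO / CLEAR_RATIO to bound how many open-coded
   move insns it will pay for a block copy or clear.  */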
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
329 /* Manage the queue of increment instructions to be output
330 for POSTINCREMENT_EXPR expressions, etc. */
332 /* Queue up to increment (or change) VAR later. BODY says how:
333 BODY should be the same thing you would pass to emit_insn
334 to increment right away. It will go to emit_insn later on.
336 The value is a QUEUED expression to be used in place of VAR
337 where you want to guarantee the pre-incrementation value of VAR. */
340 enqueue_insn (rtx var
, rtx body
)
342 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
343 body
, pending_chain
);
344 return pending_chain
;
347 /* Use protect_from_queue to convert a QUEUED expression
348 into something that you can put immediately into an instruction.
349 If the queued incrementation has not happened yet,
350 protect_from_queue returns the variable itself.
351 If the incrementation has happened, protect_from_queue returns a temp
352 that contains a copy of the old value of the variable.
354 Any time an rtx which might possibly be a QUEUED is to be put
355 into an instruction, it must be passed through protect_from_queue first.
356 QUEUED expressions are not meaningful in instructions.
358 Do not pass a value through protect_from_queue and then hold
359 on to it for a while before putting it in an instruction!
360 If the queue is flushed in between, incorrect code will result. */
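/* A minimal usage sketch (hypothetical caller; VAR and INC_PAT are assumed
   to exist, INC_PAT being whatever would normally be handed to emit_insn):

     rtx q = enqueue_insn (var, inc_pat);    queue the increment of VAR
     rtx v = protect_from_queue (q, 0);      pre-increment value of VAR
     ... put V into an insn immediately ...
     emit_queue ();                          the queued increment is emitted

   The rule restated: call protect_from_queue just before the rtx goes into
   an insn, and never hold its result across a call to emit_queue.  */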
363 protect_from_queue (rtx x
, int modify
)
365 RTX_CODE code
= GET_CODE (x
);
367 #if 0 /* A QUEUED can hang around after the queue is forced out. */
368 /* Shortcut for most common case. */
369 if (pending_chain
== 0)
375 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
376 use of autoincrement. Make a copy of the contents of the memory
377 location rather than a copy of the address, but not if the value is
378 of mode BLKmode. Don't modify X in place since it might be
380 if (code
== MEM
&& GET_MODE (x
) != BLKmode
381 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
384 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
388 rtx temp
= gen_reg_rtx (GET_MODE (x
));
390 emit_insn_before (gen_move_insn (temp
, new),
395 /* Copy the address into a pseudo, so that the returned value
396 remains correct across calls to emit_queue. */
397 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
400 /* Otherwise, recursively protect the subexpressions of all
401 the kinds of rtx's that can contain a QUEUED. */
404 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
405 if (tem
!= XEXP (x
, 0))
411 else if (code
== PLUS
|| code
== MULT
)
413 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
414 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
415 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
424 /* If the increment has not happened, use the variable itself. Copy it
425 into a new pseudo so that the value remains correct across calls to
427 if (QUEUED_INSN (x
) == 0)
428 return copy_to_reg (QUEUED_VAR (x
));
429 /* If the increment has happened and a pre-increment copy exists,
431 if (QUEUED_COPY (x
) != 0)
432 return QUEUED_COPY (x
);
433 /* The increment has happened but we haven't set up a pre-increment copy.
434 Set one up now, and use it. */
435 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
436 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
438 return QUEUED_COPY (x
);
441 /* Return nonzero if X contains a QUEUED expression:
442 if it contains anything that will be altered by a queued increment.
443 We handle only combinations of MEM, PLUS, MINUS and MULT operators
444 since memory addresses generally contain only those. */
447 queued_subexp_p (rtx x
)
449 enum rtx_code code
= GET_CODE (x
);
455 return queued_subexp_p (XEXP (x
, 0));
459 return (queued_subexp_p (XEXP (x
, 0))
460 || queued_subexp_p (XEXP (x
, 1)));
466 /* Retrieve a mark on the queue. */
471 return pending_chain
;
474 /* Perform all the pending incrementations that have been enqueued
475 after MARK was retrieved. If MARK is null, perform all the
476 pending incrementations. */
479 emit_insns_enqueued_after_mark (rtx mark
)
483 /* The marked incrementation may have been emitted in the meantime
484 through a call to emit_queue. In this case, the mark is not valid
485 anymore so do nothing. */
486 if (mark
&& ! QUEUED_BODY (mark
))
489 while ((p
= pending_chain
) != mark
)
491 rtx body
= QUEUED_BODY (p
);
493 switch (GET_CODE (body
))
501 QUEUED_INSN (p
) = body
;
505 #ifdef ENABLE_CHECKING
512 QUEUED_INSN (p
) = emit_insn (body
);
517 pending_chain
= QUEUED_NEXT (p
);
521 /* Perform all the pending incrementations. */
526 emit_insns_enqueued_after_mark (NULL_RTX
);
529 /* Copy data from FROM to TO, where the machine modes are not the same.
530 Both modes may be integer, or both may be floating.
531 UNSIGNEDP should be nonzero if FROM is an unsigned type.
532 This causes zero-extension instead of sign-extension. */
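/* A minimal usage sketch (assumed caller, not code from this file): to widen
   a SImode value SRC into a fresh DImode register with zero-extension,

     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   Passing 0 for UNSIGNEDP would request sign-extension instead.  */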
535 convert_move (rtx to
, rtx from
, int unsignedp
)
537 enum machine_mode to_mode
= GET_MODE (to
);
538 enum machine_mode from_mode
= GET_MODE (from
);
539 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
540 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
544 /* rtx code for making an equivalent value. */
545 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
546 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
548 to
= protect_from_queue (to
, 1);
549 from
= protect_from_queue (from
, 0);
551 if (to_real
!= from_real
)
554 /* If the source and destination are already the same, then there's
559 /* If FROM is a SUBREG that indicates that we have already done at least
560 the required extension, strip it. We don't handle such SUBREGs as
563 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
564 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
565 >= GET_MODE_SIZE (to_mode
))
566 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
567 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
569 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
572 if (to_mode
== from_mode
573 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
575 emit_move_insn (to
, from
);
579 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
581 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
584 if (VECTOR_MODE_P (to_mode
))
585 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
587 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
589 emit_move_insn (to
, from
);
593 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
595 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
596 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
605 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
607 else if (GET_MODE_PRECISION (from_mode
) > GET_MODE_PRECISION (to_mode
))
612 /* Try converting directly if the insn is supported. */
614 code
= tab
->handlers
[to_mode
][from_mode
].insn_code
;
615 if (code
!= CODE_FOR_nothing
)
617 emit_unop_insn (code
, to
, from
,
618 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
622 /* Otherwise use a libcall. */
623 libcall
= tab
->handlers
[to_mode
][from_mode
].libfunc
;
626 /* This conversion is not implemented yet. */
630 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
632 insns
= get_insns ();
634 emit_libcall_block (insns
, to
, value
,
635 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
637 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
641 /* Handle pointer conversion. */ /* SPEE 900220. */
642 /* Targets are expected to provide conversion insns between PxImode and
643 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
644 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
646 enum machine_mode full_mode
647 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
649 if (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
653 if (full_mode
!= from_mode
)
654 from
= convert_to_mode (full_mode
, from
, unsignedp
);
655 emit_unop_insn (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
,
659 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
661 enum machine_mode full_mode
662 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
664 if (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
668 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
670 if (to_mode
== full_mode
)
673 /* else proceed to integer conversions below. */
674 from_mode
= full_mode
;
677 /* Now both modes are integers. */
679 /* Handle expanding beyond a word. */
680 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
681 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
688 enum machine_mode lowpart_mode
;
689 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
691 /* Try converting directly if the insn is supported. */
692 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
695 /* If FROM is a SUBREG, put it into a register. Do this
696 so that we always generate the same set of insns for
697 better cse'ing; if an intermediate assignment occurred,
698 we won't be doing the operation directly on the SUBREG. */
699 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
700 from
= force_reg (from_mode
, from
);
701 emit_unop_insn (code
, to
, from
, equiv_code
);
704 /* Next, try converting via full word. */
705 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
706 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
707 != CODE_FOR_nothing
))
709 if (GET_CODE (to
) == REG
)
711 if (reg_overlap_mentioned_p (to
, from
))
712 from
= force_reg (from_mode
, from
);
713 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
715 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
716 emit_unop_insn (code
, to
,
717 gen_lowpart (word_mode
, to
), equiv_code
);
721 /* No special multiword conversion insn; do it by hand. */
724 /* Since we will turn this into a no conflict block, we must ensure
725 that the source does not overlap the target. */
727 if (reg_overlap_mentioned_p (to
, from
))
728 from
= force_reg (from_mode
, from
);
730 /* Get a copy of FROM widened to a word, if necessary. */
731 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
732 lowpart_mode
= word_mode
;
734 lowpart_mode
= from_mode
;
736 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
738 lowpart
= gen_lowpart (lowpart_mode
, to
);
739 emit_move_insn (lowpart
, lowfrom
);
741 /* Compute the value to put in each remaining word. */
743 fill_value
= const0_rtx
;
748 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
749 && STORE_FLAG_VALUE
== -1)
751 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
753 fill_value
= gen_reg_rtx (word_mode
);
754 emit_insn (gen_slt (fill_value
));
760 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
761 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
763 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
767 /* Fill the remaining words. */
768 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
770 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
771 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
776 if (fill_value
!= subword
)
777 emit_move_insn (subword
, fill_value
);
780 insns
= get_insns ();
783 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
784 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
788 /* Truncating multi-word to a word or less. */
789 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
790 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
792 if (!((GET_CODE (from
) == MEM
793 && ! MEM_VOLATILE_P (from
)
794 && direct_load
[(int) to_mode
]
795 && ! mode_dependent_address_p (XEXP (from
, 0)))
796 || GET_CODE (from
) == REG
797 || GET_CODE (from
) == SUBREG
))
798 from
= force_reg (from_mode
, from
);
799 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
803 /* Now follow all the conversions between integers
804 no more than a word long. */
806 /* For truncation, usually we can just refer to FROM in a narrower mode. */
807 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
808 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
809 GET_MODE_BITSIZE (from_mode
)))
811 if (!((GET_CODE (from
) == MEM
812 && ! MEM_VOLATILE_P (from
)
813 && direct_load
[(int) to_mode
]
814 && ! mode_dependent_address_p (XEXP (from
, 0)))
815 || GET_CODE (from
) == REG
816 || GET_CODE (from
) == SUBREG
))
817 from
= force_reg (from_mode
, from
);
818 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
819 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
820 from
= copy_to_reg (from
);
821 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
825 /* Handle extension. */
826 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
828 /* Convert directly if that works. */
829 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
833 from
= force_not_mem (from
);
835 emit_unop_insn (code
, to
, from
, equiv_code
);
840 enum machine_mode intermediate
;
844 /* Search for a mode to convert via. */
845 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
846 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
847 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
849 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
850 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
851 GET_MODE_BITSIZE (intermediate
))))
852 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
853 != CODE_FOR_nothing
))
855 convert_move (to
, convert_to_mode (intermediate
, from
,
856 unsignedp
), unsignedp
);
860 /* No suitable intermediate mode.
861 Generate what we need with shifts. */
862 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
863 - GET_MODE_BITSIZE (from_mode
), 0);
864 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
865 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
867 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
870 emit_move_insn (to
, tmp
);
875 /* Support special truncate insns for certain modes. */
876 if (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
!= CODE_FOR_nothing
)
878 emit_unop_insn (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
,
883 /* Handle truncation of volatile memrefs, and so on;
884 the things that couldn't be truncated directly,
885 and for which there was no special instruction.
887 ??? Code above formerly short-circuited this, for most integer
888 mode pairs, with a force_reg in from_mode followed by a recursive
889 call to this routine. Appears always to have been wrong. */
890 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
892 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
893 emit_move_insn (to
, temp
);
897 /* Mode combination is not recognized. */
901 /* Return an rtx for a value that would result
902 from converting X to mode MODE.
903 Both X and MODE may be floating, or both integer.
904 UNSIGNEDP is nonzero if X is an unsigned value.
905 This can be done by referring to a part of X in place
906 or by copying to a new temporary with conversion.
908 This function *must not* call protect_from_queue
909 except when putting X into an insn (in which case convert_move does it). */
912 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
914 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
917 /* Return an rtx for a value that would result
918 from converting X from mode OLDMODE to mode MODE.
919 Both modes may be floating, or both integer.
920 UNSIGNEDP is nonzero if X is an unsigned value.
922 This can be done by referring to a part of X in place
923 or by copying to a new temporary with conversion.
925 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
927 This function *must not* call protect_from_queue
928 except when putting X into an insn (in which case convert_move does it). */
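/* A small worked example (assumed host with HOST_BITS_PER_WIDE_INT >= 32,
   for illustration only):

     rtx r = convert_modes (SImode, QImode, GEN_INT (-1), 1);

   is expected to yield (const_int 255): the CONST_INT case below masks the
   value with ((HOST_WIDE_INT) 1 << 8) - 1 and returns gen_int_mode of the
   result, while UNSIGNEDP == 0 would sign-extend and give (const_int -1).  */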
931 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
935 /* If FROM is a SUBREG that indicates that we have already done at least
936 the required extension, strip it. */
938 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
939 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
940 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
941 x
= gen_lowpart (mode
, x
);
943 if (GET_MODE (x
) != VOIDmode
)
944 oldmode
= GET_MODE (x
);
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
955 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
956 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
957 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
959 HOST_WIDE_INT val
= INTVAL (x
);
961 if (oldmode
!= VOIDmode
962 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
964 int width
= GET_MODE_BITSIZE (oldmode
);
966 /* We need to zero extend VAL. */
967 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
970 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
973 /* We can do this with a gen_lowpart if both desired and current modes
974 are integer, and this is either a constant integer, a register, or a
975 non-volatile MEM. Except for the constant case where MODE is no
976 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
978 if ((GET_CODE (x
) == CONST_INT
979 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
980 || (GET_MODE_CLASS (mode
) == MODE_INT
981 && GET_MODE_CLASS (oldmode
) == MODE_INT
982 && (GET_CODE (x
) == CONST_DOUBLE
983 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
984 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
985 && direct_load
[(int) mode
])
986 || (GET_CODE (x
) == REG
987 && (! HARD_REGISTER_P (x
)
988 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
989 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
990 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
992 /* ?? If we don't know OLDMODE, we have to assume here that
993 X does not need sign- or zero-extension. This may not be
994 the case, but it's the best we can do. */
995 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
996 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
998 HOST_WIDE_INT val
= INTVAL (x
);
999 int width
= GET_MODE_BITSIZE (oldmode
);
1001 /* We must sign or zero-extend in this case. Start by
1002 zero-extending, then sign extend if we need to. */
1003 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1005 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1006 val
|= (HOST_WIDE_INT
) (-1) << width
;
1008 return gen_int_mode (val
, mode
);
1011 return gen_lowpart (mode
, x
);
  /* Converting from an integer constant into MODE is always equivalent to a
     subreg operation.  */
1016 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
1018 if (GET_MODE_BITSIZE (mode
) != GET_MODE_BITSIZE (oldmode
))
1020 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
1023 temp
= gen_reg_rtx (mode
);
1024 convert_move (temp
, x
, unsignedp
);
1028 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1029 store efficiently. Due to internal GCC limitations, this is
1030 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1031 for an immediate constant. */
1033 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1035 /* Determine whether the LEN bytes can be moved by using several move
1036 instructions. Return nonzero if a call to move_by_pieces should
1040 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
1041 unsigned int align ATTRIBUTE_UNUSED
)
1043 return MOVE_BY_PIECES_P (len
, align
);
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */
1060 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1061 unsigned int align
, int endp
)
1063 struct move_by_pieces data
;
1064 rtx to_addr
, from_addr
= XEXP (from
, 0);
1065 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1066 enum machine_mode mode
= VOIDmode
, tmode
;
1067 enum insn_code icode
;
1069 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
1072 data
.from_addr
= from_addr
;
1075 to_addr
= XEXP (to
, 0);
1078 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1079 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1081 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1088 #ifdef STACK_GROWS_DOWNWARD
1094 data
.to_addr
= to_addr
;
1097 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1098 || GET_CODE (from_addr
) == POST_INC
1099 || GET_CODE (from_addr
) == POST_DEC
);
1101 data
.explicit_inc_from
= 0;
1102 data
.explicit_inc_to
= 0;
1103 if (data
.reverse
) data
.offset
= len
;
1106 /* If copying requires more than two move insns,
1107 copy addresses to registers (to make displacements shorter)
1108 and use post-increment if available. */
1109 if (!(data
.autinc_from
&& data
.autinc_to
)
1110 && move_by_pieces_ninsns (len
, align
) > 2)
1112 /* Find the mode of the largest move... */
1113 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1114 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1115 if (GET_MODE_SIZE (tmode
) < max_size
)
1118 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1120 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1121 data
.autinc_from
= 1;
1122 data
.explicit_inc_from
= -1;
1124 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1126 data
.from_addr
= copy_addr_to_reg (from_addr
);
1127 data
.autinc_from
= 1;
1128 data
.explicit_inc_from
= 1;
1130 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1131 data
.from_addr
= copy_addr_to_reg (from_addr
);
1132 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1134 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1136 data
.explicit_inc_to
= -1;
1138 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1140 data
.to_addr
= copy_addr_to_reg (to_addr
);
1142 data
.explicit_inc_to
= 1;
1144 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1145 data
.to_addr
= copy_addr_to_reg (to_addr
);
1148 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1149 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1150 align
= MOVE_MAX
* BITS_PER_UNIT
;
1152 /* First move what we can in the largest integer mode, then go to
1153 successively smaller modes. */
1155 while (max_size
> 1)
1157 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1158 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1159 if (GET_MODE_SIZE (tmode
) < max_size
)
1162 if (mode
== VOIDmode
)
1165 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1166 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1167 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1169 max_size
= GET_MODE_SIZE (mode
);
1172 /* The code above should have handled everything. */
1186 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
1187 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
1189 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
1192 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1199 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
1207 /* Return number of insns required to move L bytes by pieces.
1208 ALIGN (in bits) is maximum alignment we can assume. */
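/* A worked example (assumed target parameters, for illustration only): with
   MOVE_MAX == 4, sufficient alignment and L == 13, the loop below counts
   13 / 4 = 3 SImode moves (1 byte left), 0 HImode moves, then 1 QImode move,
   so the result is 4.  MOVE_BY_PIECES_P compares that count against
   MOVE_RATIO.  */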
1210 static unsigned HOST_WIDE_INT
1211 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
)
1213 unsigned HOST_WIDE_INT n_insns
= 0;
1214 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1216 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1217 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1218 align
= MOVE_MAX
* BITS_PER_UNIT
;
1220 while (max_size
> 1)
1222 enum machine_mode mode
= VOIDmode
, tmode
;
1223 enum insn_code icode
;
1225 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1226 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1227 if (GET_MODE_SIZE (tmode
) < max_size
)
1230 if (mode
== VOIDmode
)
1233 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1234 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1235 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1237 max_size
= GET_MODE_SIZE (mode
);
1245 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1246 with move instructions for mode MODE. GENFUN is the gen_... function
1247 to make a move insn for that mode. DATA has all the other info. */
1250 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1251 struct move_by_pieces
*data
)
1253 unsigned int size
= GET_MODE_SIZE (mode
);
1254 rtx to1
= NULL_RTX
, from1
;
1256 while (data
->len
>= size
)
1259 data
->offset
-= size
;
1263 if (data
->autinc_to
)
1264 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1267 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1270 if (data
->autinc_from
)
1271 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1274 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1276 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1277 emit_insn (gen_add2_insn (data
->to_addr
,
1278 GEN_INT (-(HOST_WIDE_INT
)size
)));
1279 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1280 emit_insn (gen_add2_insn (data
->from_addr
,
1281 GEN_INT (-(HOST_WIDE_INT
)size
)));
1284 emit_insn ((*genfun
) (to1
, from1
));
1287 #ifdef PUSH_ROUNDING
1288 emit_single_push_insn (mode
, from1
, NULL
);
1294 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1295 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1296 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1297 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1299 if (! data
->reverse
)
1300 data
->offset
+= size
;
1306 /* Emit code to move a block Y to a block X. This may be done with
1307 string-move instructions, with multiple scalar move instructions,
1308 or with a library call.
1310 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1311 SIZE is an rtx that says how long they are.
1312 ALIGN is the maximum alignment we can assume they have.
1313 METHOD describes what kind of copy this is, and what mechanisms may be used.
   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
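/* A minimal usage sketch (assumed BLKmode MEMs X and Y, for illustration
   only): copying N constant bytes while allowing a memcpy call looks like

     emit_block_move (x, y, GEN_INT (n), BLOCK_OP_NORMAL);

   whereas BLOCK_OP_NO_LIBCALL restricts the expansion to open-coded moves,
   a movstr pattern, or the fallback byte-copy loop.  */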
1319 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1327 case BLOCK_OP_NORMAL
:
1328 may_use_call
= true;
1331 case BLOCK_OP_CALL_PARM
:
1332 may_use_call
= block_move_libcall_safe_for_call_parm ();
1334 /* Make inhibit_defer_pop nonzero around the library call
1335 to force it to pop the arguments right away. */
1339 case BLOCK_OP_NO_LIBCALL
:
1340 may_use_call
= false;
1347 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1349 if (GET_MODE (x
) != BLKmode
)
1351 if (GET_MODE (y
) != BLKmode
)
1354 x
= protect_from_queue (x
, 1);
1355 y
= protect_from_queue (y
, 0);
1356 size
= protect_from_queue (size
, 0);
1358 if (GET_CODE (x
) != MEM
)
1360 if (GET_CODE (y
) != MEM
)
1365 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1366 can be incorrect is coming from __builtin_memcpy. */
1367 if (GET_CODE (size
) == CONST_INT
)
1369 if (INTVAL (size
) == 0)
1372 x
= shallow_copy_rtx (x
);
1373 y
= shallow_copy_rtx (y
);
1374 set_mem_size (x
, size
);
1375 set_mem_size (y
, size
);
1378 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1379 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1380 else if (emit_block_move_via_movstr (x
, y
, size
, align
))
1382 else if (may_use_call
)
1383 retval
= emit_block_move_via_libcall (x
, y
, size
);
1385 emit_block_move_via_loop (x
, y
, size
, align
);
1387 if (method
== BLOCK_OP_CALL_PARM
)
1393 /* A subroutine of emit_block_move. Returns true if calling the
1394 block move libcall will not clobber any parameters which may have
1395 already been placed on the stack. */
1398 block_move_libcall_safe_for_call_parm (void)
1400 /* If arguments are pushed on the stack, then they're safe. */
1404 /* If registers go on the stack anyway, any argument is sure to clobber
1405 an outgoing argument. */
1406 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1408 tree fn
= emit_block_move_libcall_fn (false);
1410 if (REG_PARM_STACK_SPACE (fn
) != 0)
1415 /* If any argument goes in memory, then it might clobber an outgoing
1418 CUMULATIVE_ARGS args_so_far
;
1421 fn
= emit_block_move_libcall_fn (false);
1422 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1424 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1425 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1427 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1428 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1429 if (!tmp
|| !REG_P (tmp
))
1431 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1432 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1436 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1442 /* A subroutine of emit_block_move. Expand a movstr pattern;
1443 return true if successful. */
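/* For orientation (restating what the expansion below relies on, not a new
   definition): a movstrM pattern is handed operand 0 = destination BLKmode
   MEM, operand 1 = source MEM, operand 2 = the length converted to mode M,
   and operand 3 = the alignment in bytes as a CONST_INT (OPALIGN below).  */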
1446 emit_block_move_via_movstr (rtx x
, rtx y
, rtx size
, unsigned int align
)
1448 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1449 int save_volatile_ok
= volatile_ok
;
1450 enum machine_mode mode
;
1452 /* Since this is a move insn, we don't care about volatility. */
1455 /* Try the most limited insn first, because there's no point
1456 including more than one in the machine description unless
1457 the more limited one has some advantage. */
1459 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1460 mode
= GET_MODE_WIDER_MODE (mode
))
1462 enum insn_code code
= movstr_optab
[(int) mode
];
1463 insn_operand_predicate_fn pred
;
1465 if (code
!= CODE_FOR_nothing
1466 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1467 here because if SIZE is less than the mode mask, as it is
1468 returned by the macro, it will definitely be less than the
1469 actual mode mask. */
1470 && ((GET_CODE (size
) == CONST_INT
1471 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1472 <= (GET_MODE_MASK (mode
) >> 1)))
1473 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1474 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1475 || (*pred
) (x
, BLKmode
))
1476 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1477 || (*pred
) (y
, BLKmode
))
1478 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1479 || (*pred
) (opalign
, VOIDmode
)))
1482 rtx last
= get_last_insn ();
1485 op2
= convert_to_mode (mode
, size
, 1);
1486 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1487 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1488 op2
= copy_to_mode_reg (mode
, op2
);
1490 /* ??? When called via emit_block_move_for_call, it'd be
1491 nice if there were some way to inform the backend, so
1492 that it doesn't fail the expansion because it thinks
1493 emitting the libcall would be more efficient. */
1495 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1499 volatile_ok
= save_volatile_ok
;
1503 delete_insns_since (last
);
1507 volatile_ok
= save_volatile_ok
;
1511 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1512 Return the return value from memcpy, 0 otherwise. */
1515 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
)
1517 rtx dst_addr
, src_addr
;
1518 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1519 enum machine_mode size_mode
;
1522 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1524 It is unsafe to save the value generated by protect_from_queue and reuse
1525 it later. Consider what happens if emit_queue is called before the
1526 return value from protect_from_queue is used.
1528 Expansion of the CALL_EXPR below will call emit_queue before we are
1529 finished emitting RTL for argument setup. So if we are not careful we
1530 could get the wrong value for an argument.
1532 To avoid this problem we go ahead and emit code to copy the addresses of
1533 DST and SRC and SIZE into new pseudos. We can then place those new
1534 pseudos into an RTL_EXPR and use them later, even after a call to
1537 Note this is not strictly needed for library calls since they do not call
1538 emit_queue before loading their arguments. However, we may need to have
1539 library calls call emit_queue in the future since failing to do so could
1540 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1541 arguments in registers. */
1543 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1544 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1546 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1547 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1549 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1550 src_tree
= make_tree (ptr_type_node
, src_addr
);
1552 if (TARGET_MEM_FUNCTIONS
)
1553 size_mode
= TYPE_MODE (sizetype
);
1555 size_mode
= TYPE_MODE (unsigned_type_node
);
1557 size
= convert_to_mode (size_mode
, size
, 1);
1558 size
= copy_to_mode_reg (size_mode
, size
);
1560 /* It is incorrect to use the libcall calling conventions to call
1561 memcpy in this context. This could be a user call to memcpy and
1562 the user may wish to examine the return value from memcpy. For
1563 targets where libcalls and normal calls have different conventions
1564 for returning pointers, we could end up generating incorrect code.
1566 For convenience, we generate the call to bcopy this way as well. */
1568 if (TARGET_MEM_FUNCTIONS
)
1569 size_tree
= make_tree (sizetype
, size
);
1571 size_tree
= make_tree (unsigned_type_node
, size
);
1573 fn
= emit_block_move_libcall_fn (true);
1574 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1575 if (TARGET_MEM_FUNCTIONS
)
1577 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1578 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1582 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1583 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1586 /* Now we have to build up the CALL_EXPR itself. */
1587 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1588 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1589 call_expr
, arg_list
, NULL_TREE
);
1591 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1593 /* If we are initializing a readonly value, show the above call clobbered
1594 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1595 the delay slot scheduler might overlook conflicts and take nasty
1597 if (RTX_UNCHANGING_P (dst
))
1598 add_function_usage_to
1599 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode
,
1600 gen_rtx_CLOBBER (VOIDmode
, dst
),
1603 return TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
;
1606 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1607 for the function we use for block copies. The first time FOR_CALL
1608 is true, we call assemble_external. */
1610 static GTY(()) tree block_move_fn
;
1613 init_block_move_fn (const char *asmspec
)
1619 if (TARGET_MEM_FUNCTIONS
)
1621 fn
= get_identifier ("memcpy");
1622 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1623 const_ptr_type_node
, sizetype
,
1628 fn
= get_identifier ("bcopy");
1629 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
1630 ptr_type_node
, unsigned_type_node
,
1634 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
1635 DECL_EXTERNAL (fn
) = 1;
1636 TREE_PUBLIC (fn
) = 1;
1637 DECL_ARTIFICIAL (fn
) = 1;
1638 TREE_NOTHROW (fn
) = 1;
1645 SET_DECL_RTL (block_move_fn
, NULL_RTX
);
1646 SET_DECL_ASSEMBLER_NAME (block_move_fn
, get_identifier (asmspec
));
1651 emit_block_move_libcall_fn (int for_call
)
1653 static bool emitted_extern
;
1656 init_block_move_fn (NULL
);
1658 if (for_call
&& !emitted_extern
)
1660 emitted_extern
= true;
1661 make_decl_rtl (block_move_fn
, NULL
);
1662 assemble_external (block_move_fn
);
1665 return block_move_fn
;
1668 /* A subroutine of emit_block_move. Copy the data via an explicit
1669 loop. This is used only when libcalls are forbidden. */
1670 /* ??? It'd be nice to copy in hunks larger than QImode. */
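/* Roughly, the RTL emitted below has the shape of this C loop (illustrative
   pseudo-source only, with ITER kept in the mode of SIZE):

     iter = 0;
     goto cmp;
   top:
     ((char *) x_addr)[iter] = ((char *) y_addr)[iter];
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;
*/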
1673 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1674 unsigned int align ATTRIBUTE_UNUSED
)
1676 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1677 enum machine_mode iter_mode
;
1679 iter_mode
= GET_MODE (size
);
1680 if (iter_mode
== VOIDmode
)
1681 iter_mode
= word_mode
;
1683 top_label
= gen_label_rtx ();
1684 cmp_label
= gen_label_rtx ();
1685 iter
= gen_reg_rtx (iter_mode
);
1687 emit_move_insn (iter
, const0_rtx
);
1689 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1690 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1691 do_pending_stack_adjust ();
1693 emit_jump (cmp_label
);
1694 emit_label (top_label
);
1696 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
1697 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
1698 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
1699 x
= change_address (x
, QImode
, x_addr
);
1700 y
= change_address (y
, QImode
, y_addr
);
1702 emit_move_insn (x
, y
);
1704 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1705 true, OPTAB_LIB_WIDEN
);
1707 emit_move_insn (iter
, tmp
);
1709 emit_label (cmp_label
);
1711 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1715 /* Copy all or part of a value X into registers starting at REGNO.
1716 The number of registers to be filled is NREGS. */
1719 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1722 #ifdef HAVE_load_multiple
1730 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1731 x
= validize_mem (force_const_mem (mode
, x
));
1733 /* See if the machine can do this with a load multiple insn. */
1734 #ifdef HAVE_load_multiple
1735 if (HAVE_load_multiple
)
1737 last
= get_last_insn ();
1738 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1746 delete_insns_since (last
);
1750 for (i
= 0; i
< nregs
; i
++)
1751 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1752 operand_subword_force (x
, i
, mode
));
1755 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1756 The number of registers to be filled is NREGS. */
1759 move_block_from_reg (int regno
, rtx x
, int nregs
)
1766 /* See if the machine can do this with a store multiple insn. */
1767 #ifdef HAVE_store_multiple
1768 if (HAVE_store_multiple
)
1770 rtx last
= get_last_insn ();
1771 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1779 delete_insns_since (last
);
1783 for (i
= 0; i
< nregs
; i
++)
1785 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1790 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1794 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1795 ORIG, where ORIG is a non-consecutive group of registers represented by
1796 a PARALLEL. The clone is identical to the original except in that the
1797 original set of registers is replaced by a new set of pseudo registers.
1798 The new set has the same modes as the original set. */
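/* For illustration (an assumed example, not something this file produces):
   a group passing a 16-byte structure in two DImode registers might be

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the
   original block; gen_group_rtx replaces each register with a fresh pseudo
   of the same mode.  */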
1801 gen_group_rtx (rtx orig
)
1806 if (GET_CODE (orig
) != PARALLEL
)
1809 length
= XVECLEN (orig
, 0);
1810 tmps
= alloca (sizeof (rtx
) * length
);
1812 /* Skip a NULL entry in first slot. */
1813 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1818 for (; i
< length
; i
++)
1820 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1821 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1823 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1826 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1829 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1830 where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */
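/* A usage sketch (assumed values, for illustration only): if DST is a
   two-register DImode PARALLEL like the one sketched before gen_group_rtx
   and ORIG_SRC is a 16-byte BLKmode MEM, then

     emit_group_load (dst, orig_src, type, 16);

   loads bytes 0-7 into the first register and bytes 8-15 into the second,
   falling back to extract_bit_field when a piece cannot be loaded
   directly.  */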
1835 emit_group_load (rtx dst
, rtx orig_src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1840 if (GET_CODE (dst
) != PARALLEL
)
1843 /* Check for a NULL entry, used to indicate that the parameter goes
1844 both on the stack and in registers. */
1845 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1850 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1852 /* Process the pieces. */
1853 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1855 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1856 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1857 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1860 /* Handle trailing fragments that run over the size of the struct. */
1861 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1863 /* Arrange to shift the fragment to where it belongs.
1864 extract_bit_field loads to the lsb of the reg. */
1866 #ifdef BLOCK_REG_PADDING
1867 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1868 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1873 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1874 bytelen
= ssize
- bytepos
;
1879 /* If we won't be loading directly from memory, protect the real source
1880 from strange tricks we might play; but make sure that the source can
1881 be loaded directly into the destination. */
1883 if (GET_CODE (orig_src
) != MEM
1884 && (!CONSTANT_P (orig_src
)
1885 || (GET_MODE (orig_src
) != mode
1886 && GET_MODE (orig_src
) != VOIDmode
)))
1888 if (GET_MODE (orig_src
) == VOIDmode
)
1889 src
= gen_reg_rtx (mode
);
1891 src
= gen_reg_rtx (GET_MODE (orig_src
));
1893 emit_move_insn (src
, orig_src
);
1896 /* Optimize the access just a bit. */
1897 if (GET_CODE (src
) == MEM
1898 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1899 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1900 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1901 && bytelen
== GET_MODE_SIZE (mode
))
1903 tmps
[i
] = gen_reg_rtx (mode
);
1904 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1906 else if (GET_CODE (src
) == CONCAT
)
1908 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1909 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1911 if ((bytepos
== 0 && bytelen
== slen0
)
1912 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1914 /* The following assumes that the concatenated objects all
1915 have the same size. In this case, a simple calculation
1916 can be used to determine the object and the bit field
1918 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1919 if (! CONSTANT_P (tmps
[i
])
1920 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
1921 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1922 (bytepos
% slen0
) * BITS_PER_UNIT
,
1923 1, NULL_RTX
, mode
, mode
, ssize
);
1925 else if (bytepos
== 0)
1927 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1928 emit_move_insn (mem
, src
);
1929 tmps
[i
] = adjust_address (mem
, mode
, 0);
1934 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1935 SIMD register, which is currently broken. While we get GCC
1936 to emit proper RTL for these cases, let's dump to memory. */
1937 else if (VECTOR_MODE_P (GET_MODE (dst
))
1938 && GET_CODE (src
) == REG
)
1940 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1943 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1944 emit_move_insn (mem
, src
);
1945 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1947 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1948 && XVECLEN (dst
, 0) > 1)
1949 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE(dst
), bytepos
);
1950 else if (CONSTANT_P (src
)
1951 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
1954 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1955 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1959 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
1960 tmps
[i
], 0, OPTAB_WIDEN
);
1965 /* Copy the extracted pieces into the proper (probable) hard regs. */
1966 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1967 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
1970 /* Emit code to move a block SRC to block DST, where SRC and DST are
1971 non-consecutive groups of registers, each represented by a PARALLEL. */
1974 emit_group_move (rtx dst
, rtx src
)
1978 if (GET_CODE (src
) != PARALLEL
1979 || GET_CODE (dst
) != PARALLEL
1980 || XVECLEN (src
, 0) != XVECLEN (dst
, 0))
1983 /* Skip first entry if NULL. */
1984 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1985 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1986 XEXP (XVECEXP (src
, 0, i
), 0));
1989 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1990 where SRC is non-consecutive registers represented by a PARALLEL.
1991 SSIZE represents the total size of block ORIG_DST, or -1 if not
1995 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
2000 if (GET_CODE (src
) != PARALLEL
)
2003 /* Check for a NULL entry, used to indicate that the parameter goes
2004 both on the stack and in registers. */
2005 if (XEXP (XVECEXP (src
, 0, 0), 0))
2010 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2012 /* Copy the (probable) hard regs into pseudos. */
2013 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2015 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2016 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2017 emit_move_insn (tmps
[i
], reg
);
2021 /* If we won't be storing directly into memory, protect the real destination
2022 from strange tricks we might play. */
2024 if (GET_CODE (dst
) == PARALLEL
)
2028 /* We can get a PARALLEL dst if there is a conditional expression in
2029 a return statement. In that case, the dst and src are the same,
2030 so no action is necessary. */
2031 if (rtx_equal_p (dst
, src
))
2034 /* It is unclear if we can ever reach here, but we may as well handle
2035 it. Allocate a temporary, and split this into a store/load to/from
2038 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2039 emit_group_store (temp
, src
, type
, ssize
);
2040 emit_group_load (dst
, temp
, type
, ssize
);
2043 else if (GET_CODE (dst
) != MEM
&& GET_CODE (dst
) != CONCAT
)
2045 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2046 /* Make life a bit easier for combine. */
2047 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
2050 /* Process the pieces. */
2051 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2053 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2054 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2055 unsigned int bytelen
= GET_MODE_SIZE (mode
);
      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}
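      /* Worked example (for illustration): with ssize == 6, bytepos == 4 and
	 a 4-byte mode, the trailing fragment holds only ssize - bytepos == 2
	 useful bytes, so the shift above is (4 - 2) * BITS_PER_UNIT == 16
	 bits and bytelen is reduced to 2 before the store below.  */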
      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else if (bytepos == 0 && XVECLEN (src, 0))
	    {
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  tgtblk = assign_temp (build_qualified_type (type,
					      | TYPE_QUAL_CONST)),
  preserve_temp_slots (tgtblk);
  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite holds if
     the structure is returned at the most significant end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
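  /* Worked example (for illustration): with bytes == 6 and UNITS_PER_WORD
     == 4, bytes % UNITS_PER_WORD == 2, so padding_correction
     == BITS_PER_WORD - 2 * BITS_PER_UNIT == 16; the copy loop below then
     starts XBITPOS at 16, skipping the 16 padding bits at the start of
     SRCREG.  */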
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

use_reg (rtx *call_fusage, rtx reg)
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

use_regs (rtx *call_fusage, int regno, int nregs)
{
  if (regno + nregs > FIRST_PSEUDO_REGISTER)

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

use_group_regs (rtx *call_fusage, rtx regs)
{
  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align)
2267 unsigned HOST_WIDE_INT max_size
, l
;
2268 HOST_WIDE_INT offset
= 0;
2269 enum machine_mode mode
, tmode
;
2270 enum insn_code icode
;
2277 if (! STORE_BY_PIECES_P (len
, align
))
2280 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2281 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2282 align
= MOVE_MAX
* BITS_PER_UNIT
;
2284 /* We would first store what we can in the largest integer mode, then go to
2285 successively smaller modes. */
2288 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2293 max_size
= STORE_MAX_PIECES
+ 1;
2294 while (max_size
> 1)
2296 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2297 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2298 if (GET_MODE_SIZE (tmode
) < max_size
)
2301 if (mode
== VOIDmode
)
2304 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2305 if (icode
!= CODE_FOR_nothing
2306 && align
>= GET_MODE_ALIGNMENT (mode
))
2308 unsigned int size
= GET_MODE_SIZE (mode
);
2315 cst
= (*constfun
) (constfundata
, offset
, mode
);
2316 if (!LEGITIMATE_CONSTANT_P (cst
))
2326 max_size
= GET_MODE_SIZE (mode
);
2329 /* The code above should have handled everything. */
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */
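/* For illustration (hypothetical caller, not part of this file): with a
   CONSTFUN that returns the byte pattern to store,

	rtx end = store_by_pieces (to, 16, constfun, data, align, 1);

   stores 16 bytes into TO and, because ENDP is 1, returns an rtx that
   addresses TO plus 16 (mempcpy style); ENDP == 2 would instead return
   TO plus 15, and ENDP == 0 simply returns TO.  */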
2346 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
2347 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2348 void *constfundata
, unsigned int align
, int endp
)
2350 struct store_by_pieces data
;
2359 if (! STORE_BY_PIECES_P (len
, align
))
2361 to
= protect_from_queue (to
, 1);
2362 data
.constfun
= constfun
;
2363 data
.constfundata
= constfundata
;
2366 store_by_pieces_1 (&data
, align
);
2377 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2378 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2380 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
2383 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2390 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
2398 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2399 rtx with BLKmode). The caller must pass TO through protect_from_queue
2400 before calling. ALIGN is maximum alignment we can assume. */
2403 clear_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
, unsigned int align
)
2405 struct store_by_pieces data
;
2410 data
.constfun
= clear_by_pieces_1
;
2411 data
.constfundata
= NULL
;
2414 store_by_pieces_1 (&data
, align
);
2417 /* Callback routine for clear_by_pieces.
2418 Return const0_rtx unconditionally. */
2421 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED
,
2422 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
2423 enum machine_mode mode ATTRIBUTE_UNUSED
)
2428 /* Subroutine of clear_by_pieces and store_by_pieces.
2429 Generate several move instructions to store LEN bytes of block TO. (A MEM
2430 rtx with BLKmode). The caller must pass TO through protect_from_queue
2431 before calling. ALIGN is maximum alignment we can assume. */
2434 store_by_pieces_1 (struct store_by_pieces
*data ATTRIBUTE_UNUSED
,
2435 unsigned int align ATTRIBUTE_UNUSED
)
2437 rtx to_addr
= XEXP (data
->to
, 0);
2438 unsigned HOST_WIDE_INT max_size
= STORE_MAX_PIECES
+ 1;
2439 enum machine_mode mode
= VOIDmode
, tmode
;
2440 enum insn_code icode
;
2443 data
->to_addr
= to_addr
;
2445 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2446 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2448 data
->explicit_inc_to
= 0;
2450 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2452 data
->offset
= data
->len
;
2454 /* If storing requires more than two move insns,
2455 copy addresses to registers (to make displacements shorter)
2456 and use post-increment if available. */
2457 if (!data
->autinc_to
2458 && move_by_pieces_ninsns (data
->len
, align
) > 2)
2460 /* Determine the main mode we'll be using. */
2461 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2462 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2463 if (GET_MODE_SIZE (tmode
) < max_size
)
2466 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2468 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2469 data
->autinc_to
= 1;
2470 data
->explicit_inc_to
= -1;
2473 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2474 && ! data
->autinc_to
)
2476 data
->to_addr
= copy_addr_to_reg (to_addr
);
2477 data
->autinc_to
= 1;
2478 data
->explicit_inc_to
= 1;
2481 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2482 data
->to_addr
= copy_addr_to_reg (to_addr
);
2485 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2486 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2487 align
= MOVE_MAX
* BITS_PER_UNIT
;
2489 /* First store what we can in the largest integer mode, then go to
2490 successively smaller modes. */
2492 while (max_size
> 1)
2494 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2495 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2496 if (GET_MODE_SIZE (tmode
) < max_size
)
2499 if (mode
== VOIDmode
)
2502 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2503 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2504 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2506 max_size
= GET_MODE_SIZE (mode
);
2509 /* The code above should have handled everything. */
2514 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2515 with move instructions for mode MODE. GENFUN is the gen_... function
2516 to make a move insn for that mode. DATA has all the other info. */
2519 store_by_pieces_2 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
2520 struct store_by_pieces
*data
)
2522 unsigned int size
= GET_MODE_SIZE (mode
);
2525 while (data
->len
>= size
)
2528 data
->offset
-= size
;
2530 if (data
->autinc_to
)
2531 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
2534 to1
= adjust_address (data
->to
, mode
, data
->offset
);
2536 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2537 emit_insn (gen_add2_insn (data
->to_addr
,
2538 GEN_INT (-(HOST_WIDE_INT
) size
)));
2540 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
);
2541 emit_insn ((*genfun
) (to1
, cst
));
2543 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2544 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2546 if (! data
->reverse
)
2547 data
->offset
+= size
;
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */
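/* For illustration: clear_storage picks among several strategies.  A
   non-BLKmode object whose SIZE equals its mode size is cleared with a
   single move of CONST0_RTX; a BLKmode MEM with a small constant SIZE
   typically goes through clear_by_pieces (e.g. a well-aligned 16-byte
   block may become a few word-mode stores of zero, depending on the
   target); otherwise a clrstr pattern or a library call to memset/bzero
   is used.  */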
2557 clear_storage (rtx object
, rtx size
)
2560 unsigned int align
= (GET_CODE (object
) == MEM
? MEM_ALIGN (object
)
2561 : GET_MODE_ALIGNMENT (GET_MODE (object
)));
2563 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2564 just move a zero. Otherwise, do this a piece at a time. */
2565 if (GET_MODE (object
) != BLKmode
2566 && GET_CODE (size
) == CONST_INT
2567 && INTVAL (size
) == (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (object
)))
2568 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2571 object
= protect_from_queue (object
, 1);
2572 size
= protect_from_queue (size
, 0);
2574 if (size
== const0_rtx
)
2576 else if (GET_CODE (size
) == CONST_INT
2577 && CLEAR_BY_PIECES_P (INTVAL (size
), align
))
2578 clear_by_pieces (object
, INTVAL (size
), align
);
2579 else if (clear_storage_via_clrstr (object
, size
, align
))
2582 retval
= clear_storage_via_libcall (object
, size
);
2588 /* A subroutine of clear_storage. Expand a clrstr pattern;
2589 return true if successful. */
2592 clear_storage_via_clrstr (rtx object
, rtx size
, unsigned int align
)
2594 /* Try the most limited insn first, because there's no point
2595 including more than one in the machine description unless
2596 the more limited one has some advantage. */
2598 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2599 enum machine_mode mode
;
2601 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2602 mode
= GET_MODE_WIDER_MODE (mode
))
2604 enum insn_code code
= clrstr_optab
[(int) mode
];
2605 insn_operand_predicate_fn pred
;
2607 if (code
!= CODE_FOR_nothing
2608 /* We don't need MODE to be narrower than
2609 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2610 the mode mask, as it is returned by the macro, it will
2611 definitely be less than the actual mode mask. */
2612 && ((GET_CODE (size
) == CONST_INT
2613 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2614 <= (GET_MODE_MASK (mode
) >> 1)))
2615 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2616 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2617 || (*pred
) (object
, BLKmode
))
2618 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2619 || (*pred
) (opalign
, VOIDmode
)))
2622 rtx last
= get_last_insn ();
2625 op1
= convert_to_mode (mode
, size
, 1);
2626 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2627 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2628 op1
= copy_to_mode_reg (mode
, op1
);
2630 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2637 delete_insns_since (last
);
2644 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2645 Return the return value of memset, 0 otherwise. */
2648 clear_storage_via_libcall (rtx object
, rtx size
)
2650 tree call_expr
, arg_list
, fn
, object_tree
, size_tree
;
2651 enum machine_mode size_mode
;
2654 /* OBJECT or SIZE may have been passed through protect_from_queue.
2656 It is unsafe to save the value generated by protect_from_queue
2657 and reuse it later. Consider what happens if emit_queue is
2658 called before the return value from protect_from_queue is used.
2660 Expansion of the CALL_EXPR below will call emit_queue before
2661 we are finished emitting RTL for argument setup. So if we are
2662 not careful we could get the wrong value for an argument.
2664 To avoid this problem we go ahead and emit code to copy OBJECT
2665 and SIZE into new pseudos. We can then place those new pseudos
2666 into an RTL_EXPR and use them later, even after a call to
2669 Note this is not strictly needed for library calls since they
2670 do not call emit_queue before loading their arguments. However,
2671 we may need to have library calls call emit_queue in the future
2672 since failing to do so could cause problems for targets which
2673 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2675 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2677 if (TARGET_MEM_FUNCTIONS
)
2678 size_mode
= TYPE_MODE (sizetype
);
2680 size_mode
= TYPE_MODE (unsigned_type_node
);
2681 size
= convert_to_mode (size_mode
, size
, 1);
2682 size
= copy_to_mode_reg (size_mode
, size
);
  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */
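  /* For illustration: with TARGET_MEM_FUNCTIONS the tree built below
     corresponds to the ordinary C call

	memset (object, 0, size);

     (object_tree, integer_zero_node and size_tree become the argument
     list), while the bzero variant corresponds to bzero (object, size).  */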
2692 object_tree
= make_tree (ptr_type_node
, object
);
2693 if (TARGET_MEM_FUNCTIONS
)
2694 size_tree
= make_tree (sizetype
, size
);
2696 size_tree
= make_tree (unsigned_type_node
, size
);
2698 fn
= clear_storage_libcall_fn (true);
2699 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
2700 if (TARGET_MEM_FUNCTIONS
)
2701 arg_list
= tree_cons (NULL_TREE
, integer_zero_node
, arg_list
);
2702 arg_list
= tree_cons (NULL_TREE
, object_tree
, arg_list
);
2704 /* Now we have to build up the CALL_EXPR itself. */
2705 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2706 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2707 call_expr
, arg_list
, NULL_TREE
);
2709 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2711 /* If we are initializing a readonly value, show the above call
2712 clobbered it. Otherwise, a load from it may erroneously be
2713 hoisted from a loop. */
2714 if (RTX_UNCHANGING_P (object
))
2715 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
2717 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
2720 /* A subroutine of clear_storage_via_libcall. Create the tree node
2721 for the function we use for block clears. The first time FOR_CALL
2722 is true, we call assemble_external. */
2724 static GTY(()) tree block_clear_fn
;
2727 init_block_clear_fn (const char *asmspec
)
2729 if (!block_clear_fn
)
2733 if (TARGET_MEM_FUNCTIONS
)
2735 fn
= get_identifier ("memset");
2736 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2737 integer_type_node
, sizetype
,
2742 fn
= get_identifier ("bzero");
2743 args
= build_function_type_list (void_type_node
, ptr_type_node
,
2744 unsigned_type_node
, NULL_TREE
);
2747 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2748 DECL_EXTERNAL (fn
) = 1;
2749 TREE_PUBLIC (fn
) = 1;
2750 DECL_ARTIFICIAL (fn
) = 1;
2751 TREE_NOTHROW (fn
) = 1;
2753 block_clear_fn
= fn
;
2758 SET_DECL_RTL (block_clear_fn
, NULL_RTX
);
2759 SET_DECL_ASSEMBLER_NAME (block_clear_fn
, get_identifier (asmspec
));
2764 clear_storage_libcall_fn (int for_call
)
2766 static bool emitted_extern
;
2768 if (!block_clear_fn
)
2769 init_block_clear_fn (NULL
);
2771 if (for_call
&& !emitted_extern
)
2773 emitted_extern
= true;
2774 make_decl_rtl (block_clear_fn
, NULL
);
2775 assemble_external (block_clear_fn
);
2778 return block_clear_fn
;
2781 /* Generate code to copy Y into X.
2782 Both Y and X must have the same mode, except that
2783 Y can be a constant with VOIDmode.
2784 This mode cannot be BLKmode; use emit_block_move for that.
2786 Return the last instruction emitted. */
2789 emit_move_insn (rtx x
, rtx y
)
2791 enum machine_mode mode
= GET_MODE (x
);
2792 rtx y_cst
= NULL_RTX
;
2795 x
= protect_from_queue (x
, 1);
2796 y
= protect_from_queue (y
, 0);
2798 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2804 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
2805 && (last_insn
= compress_float_constant (x
, y
)))
2810 if (!LEGITIMATE_CONSTANT_P (y
))
2812 y
= force_const_mem (mode
, y
);
2814 /* If the target's cannot_force_const_mem prevented the spill,
2815 assume that the target's move expanders will also take care
2816 of the non-legitimate constant. */
2822 /* If X or Y are memory references, verify that their addresses are valid
2824 if (GET_CODE (x
) == MEM
2825 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2826 && ! push_operand (x
, GET_MODE (x
)))
2828 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2829 x
= validize_mem (x
);
2831 if (GET_CODE (y
) == MEM
2832 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2834 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2835 y
= validize_mem (y
);
2837 if (mode
== BLKmode
)
2840 last_insn
= emit_move_insn_1 (x
, y
);
2842 if (y_cst
&& GET_CODE (x
) == REG
2843 && (set
= single_set (last_insn
)) != NULL_RTX
2844 && SET_DEST (set
) == x
2845 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
2846 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
2851 /* Low level part of emit_move_insn.
2852 Called just like emit_move_insn, but assumes X and Y
2853 are basically valid. */
2856 emit_move_insn_1 (rtx x
, rtx y
)
2858 enum machine_mode mode
= GET_MODE (x
);
2859 enum machine_mode submode
;
2860 enum mode_class
class = GET_MODE_CLASS (mode
);
2862 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2865 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2867 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2869 /* Expand complex moves by moving real part and imag part, if possible. */
2870 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2871 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
2872 && (mov_optab
->handlers
[(int) submode
].insn_code
2873 != CODE_FOR_nothing
))
2875 /* Don't split destination if it is a stack push. */
2876 int stack
= push_operand (x
, GET_MODE (x
));
2878 #ifdef PUSH_ROUNDING
2879 /* In case we output to the stack, but the size is smaller than the
2880 machine can push exactly, we need to use move instructions. */
2882 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
2883 != GET_MODE_SIZE (submode
)))
2886 HOST_WIDE_INT offset1
, offset2
;
2888 /* Do not use anti_adjust_stack, since we don't want to update
2889 stack_pointer_delta. */
2890 temp
= expand_binop (Pmode
,
2891 #ifdef STACK_GROWS_DOWNWARD
2899 (GET_MODE_SIZE (GET_MODE (x
)))),
2900 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2902 if (temp
!= stack_pointer_rtx
)
2903 emit_move_insn (stack_pointer_rtx
, temp
);
2905 #ifdef STACK_GROWS_DOWNWARD
2907 offset2
= GET_MODE_SIZE (submode
);
2909 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2910 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2911 + GET_MODE_SIZE (submode
));
2914 emit_move_insn (change_address (x
, submode
,
2915 gen_rtx_PLUS (Pmode
,
2917 GEN_INT (offset1
))),
2918 gen_realpart (submode
, y
));
2919 emit_move_insn (change_address (x
, submode
,
2920 gen_rtx_PLUS (Pmode
,
2922 GEN_INT (offset2
))),
2923 gen_imagpart (submode
, y
));
2927 /* If this is a stack, push the highpart first, so it
2928 will be in the argument order.
2930 In that case, change_address is used only to convert
2931 the mode, not to change the address. */
2934 /* Note that the real part always precedes the imag part in memory
2935 regardless of machine's endianness. */
2936 #ifdef STACK_GROWS_DOWNWARD
2937 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2938 gen_imagpart (submode
, y
));
2939 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2940 gen_realpart (submode
, y
));
2942 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2943 gen_realpart (submode
, y
));
2944 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2945 gen_imagpart (submode
, y
));
2950 rtx realpart_x
, realpart_y
;
2951 rtx imagpart_x
, imagpart_y
;
2953 /* If this is a complex value with each part being smaller than a
2954 word, the usual calling sequence will likely pack the pieces into
2955 a single register. Unfortunately, SUBREG of hard registers only
2956 deals in terms of words, so we have a problem converting input
2957 arguments to the CONCAT of two registers that is used elsewhere
2958 for complex values. If this is before reload, we can copy it into
2959 memory and reload. FIXME, we should see about using extract and
2960 insert on integer registers, but complex short and complex char
2961 variables should be rarely used. */
2962 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2963 && (reload_in_progress
| reload_completed
) == 0)
2966 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2968 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2970 if (packed_dest_p
|| packed_src_p
)
2972 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2973 ? MODE_FLOAT
: MODE_INT
);
2975 enum machine_mode reg_mode
2976 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2978 if (reg_mode
!= BLKmode
)
2980 rtx mem
= assign_stack_temp (reg_mode
,
2981 GET_MODE_SIZE (mode
), 0);
2982 rtx cmem
= adjust_address (mem
, mode
, 0);
2986 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2988 emit_move_insn_1 (cmem
, y
);
2989 return emit_move_insn_1 (sreg
, mem
);
2993 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2995 emit_move_insn_1 (mem
, sreg
);
2996 return emit_move_insn_1 (x
, cmem
);
3002 realpart_x
= gen_realpart (submode
, x
);
3003 realpart_y
= gen_realpart (submode
, y
);
3004 imagpart_x
= gen_imagpart (submode
, x
);
3005 imagpart_y
= gen_imagpart (submode
, y
);
3007 /* Show the output dies here. This is necessary for SUBREGs
3008 of pseudos since we cannot track their lifetimes correctly;
3009 hard regs shouldn't appear here except as return values.
3010 We never want to emit such a clobber after reload. */
3012 && ! (reload_in_progress
|| reload_completed
)
3013 && (GET_CODE (realpart_x
) == SUBREG
3014 || GET_CODE (imagpart_x
) == SUBREG
))
3015 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3017 emit_move_insn (realpart_x
, realpart_y
);
3018 emit_move_insn (imagpart_x
, imagpart_y
);
3021 return get_last_insn ();
  /* Handle MODE_CC modes:  If we don't have a special move insn for this mode,
     find a mode to do it in.  If we have a movcc, use it.  Otherwise,
     find the MODE_INT mode of the same width.  */
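  /* For illustration: if the target's CC mode is four bytes wide and there
     is no movcc pattern, the loop below walks the integer modes starting at
     QImode and settles on SImode (the MODE_INT mode of the same size), then
     moves X and Y as SImode values.  */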
3027 else if (GET_MODE_CLASS (mode
) == MODE_CC
3028 && mov_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
3030 enum insn_code insn_code
;
3031 enum machine_mode tmode
= VOIDmode
;
3035 && mov_optab
->handlers
[(int) CCmode
].insn_code
!= CODE_FOR_nothing
)
3038 for (tmode
= QImode
; tmode
!= VOIDmode
;
3039 tmode
= GET_MODE_WIDER_MODE (tmode
))
3040 if (GET_MODE_SIZE (tmode
) == GET_MODE_SIZE (mode
))
3043 if (tmode
== VOIDmode
)
3046 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3047 may call change_address which is not appropriate if we were
3048 called when a reload was in progress. We don't have to worry
3049 about changing the address since the size in bytes is supposed to
3050 be the same. Copy the MEM to change the mode and move any
3051 substitutions from the old MEM to the new one. */
3053 if (reload_in_progress
)
3055 x
= gen_lowpart_common (tmode
, x1
);
3056 if (x
== 0 && GET_CODE (x1
) == MEM
)
3058 x
= adjust_address_nv (x1
, tmode
, 0);
3059 copy_replacements (x1
, x
);
3062 y
= gen_lowpart_common (tmode
, y1
);
3063 if (y
== 0 && GET_CODE (y1
) == MEM
)
3065 y
= adjust_address_nv (y1
, tmode
, 0);
3066 copy_replacements (y1
, y
);
3071 x
= gen_lowpart (tmode
, x
);
3072 y
= gen_lowpart (tmode
, y
);
3075 insn_code
= mov_optab
->handlers
[(int) tmode
].insn_code
;
3076 return emit_insn (GEN_FCN (insn_code
) (x
, y
));
  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
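  /* For illustration: on a target with no movsf pattern but a movsi pattern,
     a 32-bit SFmode move (which fits in a HOST_WIDE_INT) is rewritten below
     roughly as

	(set (subreg:SI (reg:SF x) 0) (subreg:SI (reg:SF y) 0))

     via simplify_gen_subreg, so the SImode move pattern does the work.  */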
3083 else if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3084 && (submode
= int_mode_for_mode (mode
)) != BLKmode
3085 && mov_optab
->handlers
[submode
].insn_code
!= CODE_FOR_nothing
)
3086 return emit_insn (GEN_FCN (mov_optab
->handlers
[submode
].insn_code
)
3087 (simplify_gen_subreg (submode
, x
, mode
, 0),
3088 simplify_gen_subreg (submode
, y
, mode
, 0)));
3090 /* This will handle any multi-word or full-word mode that lacks a move_insn
3091 pattern. However, you will get better code if you define such patterns,
3092 even if they must turn into multiple assembler instructions. */
3093 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3100 #ifdef PUSH_ROUNDING
3102 /* If X is a push on the stack, do the push now and replace
3103 X with a reference to the stack pointer. */
3104 if (push_operand (x
, GET_MODE (x
)))
3109 /* Do not use anti_adjust_stack, since we don't want to update
3110 stack_pointer_delta. */
3111 temp
= expand_binop (Pmode
,
3112 #ifdef STACK_GROWS_DOWNWARD
3120 (GET_MODE_SIZE (GET_MODE (x
)))),
3121 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3123 if (temp
!= stack_pointer_rtx
)
3124 emit_move_insn (stack_pointer_rtx
, temp
);
3126 code
= GET_CODE (XEXP (x
, 0));
3128 /* Just hope that small offsets off SP are OK. */
3129 if (code
== POST_INC
)
3130 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3131 GEN_INT (-((HOST_WIDE_INT
)
3132 GET_MODE_SIZE (GET_MODE (x
)))));
3133 else if (code
== POST_DEC
)
3134 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3135 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3137 temp
= stack_pointer_rtx
;
3139 x
= change_address (x
, VOIDmode
, temp
);
3143 /* If we are in reload, see if either operand is a MEM whose address
3144 is scheduled for replacement. */
3145 if (reload_in_progress
&& GET_CODE (x
) == MEM
3146 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3147 x
= replace_equiv_address_nv (x
, inner
);
3148 if (reload_in_progress
&& GET_CODE (y
) == MEM
3149 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3150 y
= replace_equiv_address_nv (y
, inner
);
3156 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3159 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3160 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3162 /* If we can't get a part of Y, put Y into memory if it is a
3163 constant. Otherwise, force it into a register. If we still
3164 can't get a part of Y, abort. */
3165 if (ypart
== 0 && CONSTANT_P (y
))
3167 y
= force_const_mem (mode
, y
);
3168 ypart
= operand_subword (y
, i
, 1, mode
);
3170 else if (ypart
== 0)
3171 ypart
= operand_subword_force (y
, i
, mode
);
3173 if (xpart
== 0 || ypart
== 0)
3176 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3178 last_insn
= emit_move_insn (xpart
, ypart
);
3184 /* Show the output dies here. This is necessary for SUBREGs
3185 of pseudos since we cannot track their lifetimes correctly;
3186 hard regs shouldn't appear here except as return values.
3187 We never want to emit such a clobber after reload. */
3189 && ! (reload_in_progress
|| reload_completed
)
3190 && need_clobber
!= 0)
3191 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3201 /* If Y is representable exactly in a narrower mode, and the target can
3202 perform the extension directly from constant or memory, then emit the
3203 move as an extension. */
3206 compress_float_constant (rtx x
, rtx y
)
3208 enum machine_mode dstmode
= GET_MODE (x
);
3209 enum machine_mode orig_srcmode
= GET_MODE (y
);
3210 enum machine_mode srcmode
;
3213 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3215 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3216 srcmode
!= orig_srcmode
;
3217 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3220 rtx trunc_y
, last_insn
;
3222 /* Skip if the target can't extend this way. */
3223 ic
= can_extend_p (dstmode
, srcmode
, 0);
3224 if (ic
== CODE_FOR_nothing
)
3227 /* Skip if the narrowed value isn't exact. */
3228 if (! exact_real_truncate (srcmode
, &r
))
3231 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3233 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3235 /* Skip if the target needs extra instructions to perform
3237 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
3240 else if (float_extend_from_mem
[dstmode
][srcmode
])
3241 trunc_y
= validize_mem (force_const_mem (srcmode
, trunc_y
));
3245 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3246 last_insn
= get_last_insn ();
3248 if (GET_CODE (x
) == REG
)
3249 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
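/* For illustration: with a constant SIZE of 24 bytes and EXTRA of 8, a
   single anti_adjust_stack of plus_constant (size, extra) grows the stack
   by 32 bytes; the address returned then refers to the 24-byte block
   proper, with the 8 padding bytes placed at low or high addresses
   according to BELOW.  */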
3269 push_block (rtx size
, int extra
, int below
)
3273 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3274 if (CONSTANT_P (size
))
3275 anti_adjust_stack (plus_constant (size
, extra
));
3276 else if (GET_CODE (size
) == REG
&& extra
== 0)
3277 anti_adjust_stack (size
);
3280 temp
= copy_to_mode_reg (Pmode
, size
);
3282 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3283 temp
, 0, OPTAB_LIB_WIDEN
);
3284 anti_adjust_stack (temp
);
3287 #ifndef STACK_GROWS_DOWNWARD
3293 temp
= virtual_outgoing_args_rtx
;
3294 if (extra
!= 0 && below
)
3295 temp
= plus_constant (temp
, extra
);
3299 if (GET_CODE (size
) == CONST_INT
)
3300 temp
= plus_constant (virtual_outgoing_args_rtx
,
3301 -INTVAL (size
) - (below
? 0 : extra
));
3302 else if (extra
!= 0 && !below
)
3303 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3304 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3306 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3307 negate_rtx (Pmode
, size
));
3310 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
3313 #ifdef PUSH_ROUNDING
3315 /* Emit single push insn. */
3318 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
3321 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3323 enum insn_code icode
;
3324 insn_operand_predicate_fn pred
;
3326 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3327 /* If there is push pattern, use it. Otherwise try old way of throwing
3328 MEM representing push operation to move expander. */
3329 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3330 if (icode
!= CODE_FOR_nothing
)
3332 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3333 && !((*pred
) (x
, mode
))))
3334 x
= force_reg (mode
, x
);
3335 emit_insn (GEN_FCN (icode
) (x
));
3338 if (GET_MODE_SIZE (mode
) == rounded_size
)
3339 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3340 /* If we are to pad downward, adjust the stack pointer first and
3341 then store X into the stack location using an offset. This is
3342 because emit_move_insn does not know how to pad; it does not have
3344 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
3346 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
3347 HOST_WIDE_INT offset
;
3349 emit_move_insn (stack_pointer_rtx
,
3350 expand_binop (Pmode
,
3351 #ifdef STACK_GROWS_DOWNWARD
3357 GEN_INT (rounded_size
),
3358 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
3360 offset
= (HOST_WIDE_INT
) padding_size
;
3361 #ifdef STACK_GROWS_DOWNWARD
3362 if (STACK_PUSH_CODE
== POST_DEC
)
3363 /* We have already decremented the stack pointer, so get the
3365 offset
+= (HOST_WIDE_INT
) rounded_size
;
3367 if (STACK_PUSH_CODE
== POST_INC
)
3368 /* We have already incremented the stack pointer, so get the
3370 offset
-= (HOST_WIDE_INT
) rounded_size
;
3372 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (offset
));
3376 #ifdef STACK_GROWS_DOWNWARD
3377 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3378 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3379 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3381 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3382 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3383 GEN_INT (rounded_size
));
3385 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3388 dest
= gen_rtx_MEM (mode
, dest_addr
);
3392 set_mem_attributes (dest
, type
, 1);
3394 if (flag_optimize_sibling_calls
)
3395 /* Function incoming arguments may overlap with sibling call
3396 outgoing arguments and we cannot allow reordering of reads
3397 from function arguments with stores to outgoing arguments
3398 of sibling calls. */
3399 set_mem_alias_set (dest
, 0);
3401 emit_move_insn (dest
, x
);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
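/* For illustration: with PARTIAL == 3 and REG nonzero, the first three
   words of X are loaded into three consecutive registers starting at REG
   (or spread across a PARALLEL), and only the remainder of X is pushed;
   the stack space reserved for the argument shrinks by those three words,
   rounded down to a multiple of PARM_BOUNDARY.  */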
3438 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3439 unsigned int align
, int partial
, rtx reg
, int extra
,
3440 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3444 enum direction stack_direction
3445 #ifdef STACK_GROWS_DOWNWARD
3451 /* Decide where to pad the argument: `downward' for below,
3452 `upward' for above, or `none' for don't pad it.
3453 Default is below for small data on big-endian machines; else above. */
3454 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3456 /* Invert direction if stack is post-decrement.
3458 if (STACK_PUSH_CODE
== POST_DEC
)
3459 if (where_pad
!= none
)
3460 where_pad
= (where_pad
== downward
? upward
: downward
);
3462 xinner
= x
= protect_from_queue (x
, 0);
3464 if (mode
== BLKmode
)
3466 /* Copy a block into the stack, entirely or partially. */
3469 int used
= partial
* UNITS_PER_WORD
;
3473 if (reg
&& GET_CODE (reg
) == PARALLEL
)
3475 /* Use the size of the elt to compute offset. */
3476 rtx elt
= XEXP (XVECEXP (reg
, 0, 0), 0);
3477 used
= partial
* GET_MODE_SIZE (GET_MODE (elt
));
3478 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3481 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3488 /* USED is now the # of bytes we need not copy to the stack
3489 because registers will take care of them. */
3492 xinner
= adjust_address (xinner
, BLKmode
, used
);
3494 /* If the partial register-part of the arg counts in its stack size,
3495 skip the part of stack space corresponding to the registers.
3496 Otherwise, start copying to the beginning of the stack space,
3497 by setting SKIP to 0. */
3498 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3500 #ifdef PUSH_ROUNDING
3501 /* Do it with several push insns if that doesn't take lots of insns
3502 and if there is no difficulty with push insns that skip bytes
3503 on the stack for alignment purposes. */
3506 && GET_CODE (size
) == CONST_INT
3508 && MEM_ALIGN (xinner
) >= align
3509 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3510 /* Here we avoid the case of a structure whose weak alignment
3511 forces many pushes of a small amount of data,
3512 and such small pushes do rounding that causes trouble. */
3513 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3514 || align
>= BIGGEST_ALIGNMENT
3515 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3516 == (align
/ BITS_PER_UNIT
)))
3517 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3519 /* Push padding now if padding above and stack grows down,
3520 or if padding below and stack grows up.
3521 But if space already allocated, this has already been done. */
3522 if (extra
&& args_addr
== 0
3523 && where_pad
!= none
&& where_pad
!= stack_direction
)
3524 anti_adjust_stack (GEN_INT (extra
));
3526 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3529 #endif /* PUSH_ROUNDING */
3533 /* Otherwise make space on the stack and copy the data
3534 to the address of that space. */
3536 /* Deduct words put into registers from the size we must copy. */
3539 if (GET_CODE (size
) == CONST_INT
)
3540 size
= GEN_INT (INTVAL (size
) - used
);
3542 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3543 GEN_INT (used
), NULL_RTX
, 0,
3547 /* Get the address of the stack space.
3548 In this case, we do not deal with EXTRA separately.
3549 A single stack adjust will do. */
3552 temp
= push_block (size
, extra
, where_pad
== downward
);
3555 else if (GET_CODE (args_so_far
) == CONST_INT
)
3556 temp
= memory_address (BLKmode
,
3557 plus_constant (args_addr
,
3558 skip
+ INTVAL (args_so_far
)));
3560 temp
= memory_address (BLKmode
,
3561 plus_constant (gen_rtx_PLUS (Pmode
,
3566 if (!ACCUMULATE_OUTGOING_ARGS
)
3568 /* If the source is referenced relative to the stack pointer,
3569 copy it to another register to stabilize it. We do not need
3570 to do this if we know that we won't be changing sp. */
3572 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3573 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3574 temp
= copy_to_reg (temp
);
3577 target
= gen_rtx_MEM (BLKmode
, temp
);
3581 set_mem_attributes (target
, type
, 1);
3582 /* Function incoming arguments may overlap with sibling call
3583 outgoing arguments and we cannot allow reordering of reads
3584 from function arguments with stores to outgoing arguments
3585 of sibling calls. */
3586 set_mem_alias_set (target
, 0);
3589 /* ALIGN may well be better aligned than TYPE, e.g. due to
3590 PARM_BOUNDARY. Assume the caller isn't lying. */
3591 set_mem_align (target
, align
);
3593 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3596 else if (partial
> 0)
3598 /* Scalar partly in registers. */
3600 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3603 /* # words of start of argument
3604 that we must make space for but need not store. */
3605 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3606 int args_offset
= INTVAL (args_so_far
);
3609 /* Push padding now if padding above and stack grows down,
3610 or if padding below and stack grows up.
3611 But if space already allocated, this has already been done. */
3612 if (extra
&& args_addr
== 0
3613 && where_pad
!= none
&& where_pad
!= stack_direction
)
3614 anti_adjust_stack (GEN_INT (extra
));
3616 /* If we make space by pushing it, we might as well push
3617 the real data. Otherwise, we can leave OFFSET nonzero
3618 and leave the space uninitialized. */
3622 /* Now NOT_STACK gets the number of words that we don't need to
3623 allocate on the stack. */
3624 not_stack
= partial
- offset
;
3626 /* If the partial register-part of the arg counts in its stack size,
3627 skip the part of stack space corresponding to the registers.
3628 Otherwise, start copying to the beginning of the stack space,
3629 by setting SKIP to 0. */
3630 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3632 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3633 x
= validize_mem (force_const_mem (mode
, x
));
3635 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3636 SUBREGs of such registers are not allowed. */
3637 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3638 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3639 x
= copy_to_reg (x
);
3641 /* Loop over all the words allocated on the stack for this arg. */
3642 /* We can do it by words, because any scalar bigger than a word
3643 has a size a multiple of a word. */
3644 #ifndef PUSH_ARGS_REVERSED
3645 for (i
= not_stack
; i
< size
; i
++)
3647 for (i
= size
- 1; i
>= not_stack
; i
--)
3649 if (i
>= not_stack
+ offset
)
3650 emit_push_insn (operand_subword_force (x
, i
, mode
),
3651 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3653 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3655 reg_parm_stack_space
, alignment_pad
);
3662 /* Push padding now if padding above and stack grows down,
3663 or if padding below and stack grows up.
3664 But if space already allocated, this has already been done. */
3665 if (extra
&& args_addr
== 0
3666 && where_pad
!= none
&& where_pad
!= stack_direction
)
3667 anti_adjust_stack (GEN_INT (extra
));
3669 #ifdef PUSH_ROUNDING
3670 if (args_addr
== 0 && PUSH_ARGS
)
3671 emit_single_push_insn (mode
, x
, type
);
3675 if (GET_CODE (args_so_far
) == CONST_INT
)
3677 = memory_address (mode
,
3678 plus_constant (args_addr
,
3679 INTVAL (args_so_far
)));
3681 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3683 dest
= gen_rtx_MEM (mode
, addr
);
3686 set_mem_attributes (dest
, type
, 1);
3687 /* Function incoming arguments may overlap with sibling call
3688 outgoing arguments and we cannot allow reordering of reads
3689 from function arguments with stores to outgoing arguments
3690 of sibling calls. */
3691 set_mem_alias_set (dest
, 0);
3694 emit_move_insn (dest
, x
);
3698 /* If part should go in registers, copy that part
3699 into the appropriate registers. Do this now, at the end,
3700 since mem-to-mem copies above may do function calls. */
3701 if (partial
> 0 && reg
!= 0)
3703 /* Handle calls that pass values in multiple non-contiguous locations.
3704 The Irix 6 ABI has examples of this. */
3705 if (GET_CODE (reg
) == PARALLEL
)
3706 emit_group_load (reg
, x
, type
, -1);
3708 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3711 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3712 anti_adjust_stack (GEN_INT (extra
));
3714 if (alignment_pad
&& args_addr
== 0)
3715 anti_adjust_stack (alignment_pad
);
3718 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3722 get_subtarget (rtx x
)
3725 /* Only registers can be subtargets. */
3726 || GET_CODE (x
) != REG
3727 /* If the register is readonly, it can't be set more than once. */
3728 || RTX_UNCHANGING_P (x
)
3729 /* Don't use hard regs to avoid extending their life. */
3730 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3731 /* Avoid subtargets inside loops,
3732 since they hide some invariant expressions. */
3733 || preserve_subexpressions_p ())
3737 /* Expand an assignment that stores the value of FROM into TO.
3738 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3739 (This may contain a QUEUED rtx;
3740 if the value is constant, this rtx is a constant.)
3741 Otherwise, the returned value is NULL_RTX. */
3744 expand_assignment (tree to
, tree from
, int want_value
)
3749 /* Don't crash if the lhs of the assignment was erroneous. */
3751 if (TREE_CODE (to
) == ERROR_MARK
)
3753 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3754 return want_value
? result
: NULL_RTX
;
3757 /* Assignment of a structure component needs special treatment
3758 if the structure component's rtx is not simply a MEM.
3759 Assignment of an array element at a constant index, and assignment of
3760 an array element in an unaligned packed structure field, has the same
3763 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3764 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
3765 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
3767 enum machine_mode mode1
;
3768 HOST_WIDE_INT bitsize
, bitpos
;
3776 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3777 &unsignedp
, &volatilep
);
3779 /* If we are going to use store_bit_field and extract_bit_field,
3780 make sure to_rtx will be safe for multiple use. */
3782 if (mode1
== VOIDmode
&& want_value
)
3783 tem
= stabilize_reference (tem
);
3785 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3789 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3791 if (GET_CODE (to_rtx
) != MEM
)
3794 #ifdef POINTERS_EXTEND_UNSIGNED
3795 if (GET_MODE (offset_rtx
) != Pmode
)
3796 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
3798 if (GET_MODE (offset_rtx
) != ptr_mode
)
3799 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3802 /* A constant address in TO_RTX can have VOIDmode, we must not try
3803 to call force_reg for that case. Avoid that case. */
3804 if (GET_CODE (to_rtx
) == MEM
3805 && GET_MODE (to_rtx
) == BLKmode
3806 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3808 && (bitpos
% bitsize
) == 0
3809 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3810 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3812 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3816 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3817 highest_pow2_factor_for_target (to
,
3821 if (GET_CODE (to_rtx
) == MEM
)
3823 /* If the field is at offset zero, we could have been given the
3824 DECL_RTX of the parent struct. Don't munge it. */
3825 to_rtx
= shallow_copy_rtx (to_rtx
);
3827 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
3830 /* Deal with volatile and readonly fields. The former is only done
3831 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3832 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
3834 if (to_rtx
== orig_to_rtx
)
3835 to_rtx
= copy_rtx (to_rtx
);
3836 MEM_VOLATILE_P (to_rtx
) = 1;
3839 if (TREE_CODE (to
) == COMPONENT_REF
3840 && TREE_READONLY (TREE_OPERAND (to
, 1))
3841 /* We can't assert that a MEM won't be set more than once
3842 if the component is not addressable because another
3843 non-addressable component may be referenced by the same MEM. */
3844 && ! (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
)))
3846 if (to_rtx
== orig_to_rtx
)
3847 to_rtx
= copy_rtx (to_rtx
);
3848 RTX_UNCHANGING_P (to_rtx
) = 1;
3851 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
3853 if (to_rtx
== orig_to_rtx
)
3854 to_rtx
= copy_rtx (to_rtx
);
3855 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3858 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3860 /* Spurious cast for HPUX compiler. */
3861 ? ((enum machine_mode
)
3862 TYPE_MODE (TREE_TYPE (to
)))
3864 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
3866 preserve_temp_slots (result
);
3870 /* If the value is meaningful, convert RESULT to the proper mode.
3871 Otherwise, return nothing. */
3872 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3873 TYPE_MODE (TREE_TYPE (from
)),
3875 TYPE_UNSIGNED (TREE_TYPE (to
)))
3879 /* If the rhs is a function call and its value is not an aggregate,
3880 call the function before we start to compute the lhs.
3881 This is needed for correct code for cases such as
3882 val = setjmp (buf) on machines where reference to val
3883 requires loading up part of an address in a separate insn.
3885 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3886 since it might be a promoted variable where the zero- or sign- extension
3887 needs to be done. Handling this in the normal way is safe because no
3888 computation is done before the call. */
3889 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
3890 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3891 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3892 && GET_CODE (DECL_RTL (to
)) == REG
))
3897 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3899 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3901 /* Handle calls that return values in multiple non-contiguous locations.
3902 The Irix 6 ABI has examples of this. */
3903 if (GET_CODE (to_rtx
) == PARALLEL
)
3904 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
3905 int_size_in_bytes (TREE_TYPE (from
)));
3906 else if (GET_MODE (to_rtx
) == BLKmode
)
3907 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
3910 if (POINTER_TYPE_P (TREE_TYPE (to
)))
3911 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3912 emit_move_insn (to_rtx
, value
);
3914 preserve_temp_slots (to_rtx
);
3917 return want_value
? to_rtx
: NULL_RTX
;
3920 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3921 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3924 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3926 /* Don't move directly into a return register. */
3927 if (TREE_CODE (to
) == RESULT_DECL
3928 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3933 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3935 if (GET_CODE (to_rtx
) == PARALLEL
)
3936 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
3937 int_size_in_bytes (TREE_TYPE (from
)));
3939 emit_move_insn (to_rtx
, temp
);
3941 preserve_temp_slots (to_rtx
);
3944 return want_value
? to_rtx
: NULL_RTX
;
3947 /* In case we are returning the contents of an object which overlaps
3948 the place the value is being stored, use a safe function when copying
3949 a value through a pointer into a structure value return block. */
3950 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3951 && current_function_returns_struct
3952 && !current_function_returns_pcc_struct
)
3957 size
= expr_size (from
);
3958 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3960 if (TARGET_MEM_FUNCTIONS
)
3961 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3962 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3963 XEXP (from_rtx
, 0), Pmode
,
3964 convert_to_mode (TYPE_MODE (sizetype
),
3965 size
, TYPE_UNSIGNED (sizetype
)),
3966 TYPE_MODE (sizetype
));
3968 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3969 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3970 XEXP (to_rtx
, 0), Pmode
,
3971 convert_to_mode (TYPE_MODE (integer_type_node
),
3973 TYPE_UNSIGNED (integer_type_node
)),
3974 TYPE_MODE (integer_type_node
));
3976 preserve_temp_slots (to_rtx
);
3979 return want_value
? to_rtx
: NULL_RTX
;
3982 /* Compute FROM and store the value in the rtx we got. */
3985 result
= store_expr (from
, to_rtx
, want_value
);
3986 preserve_temp_slots (result
);
3989 return want_value
? result
: NULL_RTX
;
3992 /* Generate code for computing expression EXP,
3993 and storing the value into TARGET.
3994 TARGET may contain a QUEUED rtx.
3996 If WANT_VALUE & 1 is nonzero, return a copy of the value
3997 not in TARGET, so that we can be sure to use the proper
3998 value in a containing expression even if TARGET has something
3999 else stored in it. If possible, we copy the value through a pseudo
4000 and return that pseudo. Or, if the value is constant, we try to
4001 return the constant. In some cases, we return a pseudo
4002 copied *from* TARGET.
   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
4011 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4012 to catch quickly any cases where the caller uses the value
4013 and fails to set WANT_VALUE.
4015 If WANT_VALUE & 2 is set, this is a store into a call param on the
4016 stack, and block moves may need to be treated specially. */
rtx
store_expr (tree exp, rtx target, int want_value)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  rtx mark = mark_queue ();
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other.  Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      target = protect_from_queue (target, 1);
      do_pending_stack_adjust ();

      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
      end_cleanup_deferral ();
      emit_jump_insn (gen_jump (lab2));

      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
      end_cleanup_deferral ();

      return want_value & 1 ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target),
                              (want_value & 2
                               ? EXPAND_STACK_PARM : EXPAND_NORMAL));
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
                            (want_value & 2
                             ? EXPAND_STACK_PARM : EXPAND_NORMAL));

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
        dont_return_target = 1;
    }
  else if ((want_value & 1) != 0
           && GET_CODE (target) == MEM
           && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target),
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        {
          /* If TEMP is already in the desired TARGET, only copy it from
             memory and don't store it there again.  */
          if (temp == target
              || (rtx_equal_p (temp, target)
                  && ! side_effects_p (temp) && ! side_effects_p (target)))
            dont_store_target = 1;
          temp = copy_to_reg (temp);
        }
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if ((want_value & 1) == 0
          && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0)
        {
          if (TYPE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp = convert
              (lang_hooks.types.signed_or_unsigned_type
               (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

          exp = convert (lang_hooks.types.type_for_mode
                         (GET_MODE (SUBREG_REG (target)),
                          SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a MEM and we want a result value, make the access
         now so it gets done only once.  Strictly speaking, this is
         only necessary if the MEM is volatile, or if the address
         overlaps TARGET.  But not performing the load twice also
         reduces the amount of rtl we generate and then have to CSE.  */
      if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
         target.  Otherwise, the caller might get confused by a result whose
         mode is larger than expected.  */
      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
        {
          if (GET_MODE (temp) != VOIDmode)
            {
              temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (temp,
                                            SUBREG_PROMOTED_UNSIGNED_P (target));
            }
          else
            temp = convert_modes (GET_MODE (target),
                                  GET_MODE (SUBREG_REG (target)),
                                  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      return want_value & 1 ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
                               (want_value & 2
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
                               &alt_rtl);

      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || (want_value & 1) != 0))
        dont_return_target = 1;
    }
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
         but TARGET is not a valid memory reference, TEMP will differ
         from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
         unless necessary, because some front ends' (C++) expr_size hook
         aborts on objects that are not supposed to be bit-copied or
         bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      emit_insns_enqueued_after_mark (mark);
      target = protect_from_queue (target, 1);
      temp = protect_from_queue (temp, 0);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (want_value & 2
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (want_value & 2
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
              rtx label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
                                               TYPE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (want_value & 2
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */
              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
                  if (GET_MODE (copy_size_rtx) != Pmode)
                    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
                                                     TYPE_UNSIGNED (sizetype));
#endif

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, TREE_TYPE (exp),
                         int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (want_value & 2
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
        {
          temp = force_operand (temp, target);
          if (temp != target)
            emit_move_insn (target, temp);
        }
    }

  /* If we don't want a value, return NULL_RTX.  */
  if ((want_value & 1) == 0)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if ((want_value & 1) != 0
           && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
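/* Illustrative sketch (not part of GCC): the STRING_CST case above copies
   min (target size, string length) bytes of the initializer and then clears
   whatever is left of the target.  The standalone helper below shows the
   same semantics with plain memcpy/memset; the names init_char_array, dst,
   dstlen, src and srclen are hypothetical and exist only for this example.
   Kept inside #if 0 so it is not compiled as part of this file.  */
#if 0
#include <string.h>

static void
init_char_array (char *dst, size_t dstlen, const char *src, size_t srclen)
{
  /* Copy only as much of the initializer as fits in the target.  */
  size_t copy = srclen < dstlen ? srclen : dstlen;

  memcpy (dst, src, copy);

  /* Clear the tail of the target, as store_expr does with clear_storage.  */
  if (copy < dstlen)
    memset (dst + copy, 0, dstlen - copy);
}
#endif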
4390 /* Examine CTOR. Discover how many scalar fields are set to non-zero
4391 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4392 are set to non-constant values and place it in *P_NC_ELTS. */
4395 categorize_ctor_elements_1 (tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
4396 HOST_WIDE_INT
*p_nc_elts
)
4398 HOST_WIDE_INT nz_elts
, nc_elts
;
4404 for (list
= CONSTRUCTOR_ELTS (ctor
); list
; list
= TREE_CHAIN (list
))
4406 tree value
= TREE_VALUE (list
);
4407 tree purpose
= TREE_PURPOSE (list
);
4411 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4413 tree lo_index
= TREE_OPERAND (purpose
, 0);
4414 tree hi_index
= TREE_OPERAND (purpose
, 1);
4416 if (host_integerp (lo_index
, 1) && host_integerp (hi_index
, 1))
4417 mult
= (tree_low_cst (hi_index
, 1)
4418 - tree_low_cst (lo_index
, 1) + 1);
4421 switch (TREE_CODE (value
))
4425 HOST_WIDE_INT nz
= 0, nc
= 0;
4426 categorize_ctor_elements_1 (value
, &nz
, &nc
);
4427 nz_elts
+= mult
* nz
;
4428 nc_elts
+= mult
* nc
;
4434 if (!initializer_zerop (value
))
4438 if (!initializer_zerop (TREE_REALPART (value
)))
4440 if (!initializer_zerop (TREE_IMAGPART (value
)))
4446 for (v
= TREE_VECTOR_CST_ELTS (value
); v
; v
= TREE_CHAIN (v
))
4447 if (!initializer_zerop (TREE_VALUE (v
)))
4454 if (!initializer_constant_valid_p (value
, TREE_TYPE (value
)))
4460 *p_nz_elts
+= nz_elts
;
4461 *p_nc_elts
+= nc_elts
;
4465 categorize_ctor_elements (tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
4466 HOST_WIDE_INT
*p_nc_elts
)
4470 categorize_ctor_elements_1 (ctor
, p_nz_elts
, p_nc_elts
);
4473 /* Count the number of scalars in TYPE. Return -1 on overflow or
4477 count_type_elements (tree type
)
4479 const HOST_WIDE_INT max
= ~((HOST_WIDE_INT
)1 << (HOST_BITS_PER_WIDE_INT
-1));
4480 switch (TREE_CODE (type
))
4484 tree telts
= array_type_nelts (type
);
4485 if (telts
&& host_integerp (telts
, 1))
4487 HOST_WIDE_INT n
= tree_low_cst (telts
, 1);
4488 HOST_WIDE_INT m
= count_type_elements (TREE_TYPE (type
));
4499 HOST_WIDE_INT n
= 0, t
;
4502 for (f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
4503 if (TREE_CODE (f
) == FIELD_DECL
)
4505 t
= count_type_elements (TREE_TYPE (f
));
4515 case QUAL_UNION_TYPE
:
4517 /* Ho hum. How in the world do we guess here? Clearly it isn't
4518 right to count the fields. Guess based on the number of words. */
4519 HOST_WIDE_INT n
= int_size_in_bytes (type
);
4522 return n
/ UNITS_PER_WORD
;
      /* ??? This is broken.  We should encode the vector width in the tree.  */
      return GET_MODE_NUNITS (TYPE_MODE (type));
4539 case REFERENCE_TYPE
:
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, elts;

      /* If there are no ranges of true bits, it is all zero.  */
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts);
      elts = count_type_elements (TREE_TYPE (exp));

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
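/* Illustrative sketch (not part of GCC): mostly_zeros_p above treats an
   aggregate initializer as "mostly zero" when fewer than a quarter of its
   scalar elements are nonzero; store_constructor then clears the whole
   object first and only stores the nonzero elements.  The standalone helper
   below applies the same 3/4 rule to a plain array of ints; the name
   mostly_zero_ints is hypothetical.  Kept inside #if 0 so it is not
   compiled as part of this file.  */
#if 0
#include <stddef.h>

static int
mostly_zero_ints (const int *v, size_t n)
{
  size_t nonzero = 0, i;

  for (i = 0; i < n; i++)
    if (v[i] != 0)
      nonzero++;

  /* Same test as "nz_elts < elts / 4" above.  */
  return nonzero < n / 4;
}
#endif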
4576 /* Helper function for store_constructor.
4577 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4578 TYPE is the type of the CONSTRUCTOR, not the element type.
4579 CLEARED is as for store_constructor.
4580 ALIAS_SET is the alias set to use for any stores.
4582 This provides a recursive shortcut back to store_constructor when it isn't
4583 necessary to go through store_field. This is so that we can pass through
4584 the cleared field to let store_constructor know that we may not have to
4585 clear a substructure if the outer structure has already been cleared. */
4588 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
4589 HOST_WIDE_INT bitpos
, enum machine_mode mode
,
4590 tree exp
, tree type
, int cleared
, int alias_set
)
4592 if (TREE_CODE (exp
) == CONSTRUCTOR
4593 /* We can only call store_constructor recursively if the size and
4594 bit position are on a byte boundary. */
4595 && bitpos
% BITS_PER_UNIT
== 0
4596 && (bitsize
> 0 && bitsize
% BITS_PER_UNIT
== 0)
4597 /* If we have a nonzero bitpos for a register target, then we just
4598 let store_field do the bitfield handling. This is unlikely to
4599 generate unnecessary clear instructions anyways. */
4600 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4602 if (GET_CODE (target
) == MEM
)
4604 = adjust_address (target
,
4605 GET_MODE (target
) == BLKmode
4607 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4608 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4611 /* Update the alias set, if required. */
4612 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4613 && MEM_ALIAS_SET (target
) != 0)
4615 target
= copy_rtx (target
);
4616 set_mem_alias_set (target
, alias_set
);
4619 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
4622 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
4626 /* Store the value of constructor EXP into the rtx TARGET.
4627 TARGET is either a REG or a MEM; we know it cannot conflict, since
4628 safe_from_p has been called.
4629 CLEARED is true if TARGET is known to have been zero'd.
4630 SIZE is the number of bytes of TARGET we are allowed to modify: this
4631 may not be the same as the size of EXP if we are assigning to a field
4632 which has been packed to exclude padding bits. */
4635 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
4637 tree type
= TREE_TYPE (exp
);
4638 #ifdef WORD_REGISTER_OPERATIONS
4639 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4642 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4643 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4647 /* If size is zero or the target is already cleared, do nothing. */
4648 if (size
== 0 || cleared
)
4650 /* We either clear the aggregate or indicate the value is dead. */
4651 else if ((TREE_CODE (type
) == UNION_TYPE
4652 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4653 && ! CONSTRUCTOR_ELTS (exp
))
4654 /* If the constructor is empty, clear the union. */
4656 clear_storage (target
, expr_size (exp
));
4660 /* If we are building a static constructor into a register,
4661 set the initial value as zero so we can fold the value into
4662 a constant. But if more than one register is involved,
4663 this probably loses. */
4664 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4665 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4667 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4671 /* If the constructor has fewer fields than the structure
4672 or if we are initializing the structure to mostly zeros,
4673 clear the whole structure first. Don't do this if TARGET is a
4674 register whose mode size isn't equal to SIZE since clear_storage
4675 can't handle this case. */
4676 else if (((list_length (CONSTRUCTOR_ELTS (exp
)) != fields_length (type
))
4677 || mostly_zeros_p (exp
))
4678 && (GET_CODE (target
) != REG
4679 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4682 rtx xtarget
= target
;
4684 if (readonly_fields_p (type
))
4686 xtarget
= copy_rtx (xtarget
);
4687 RTX_UNCHANGING_P (xtarget
) = 1;
4690 clear_storage (xtarget
, GEN_INT (size
));
4695 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4697 /* Store each element of the constructor into
4698 the corresponding field of TARGET. */
4700 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4702 tree field
= TREE_PURPOSE (elt
);
4703 tree value
= TREE_VALUE (elt
);
4704 enum machine_mode mode
;
4705 HOST_WIDE_INT bitsize
;
4706 HOST_WIDE_INT bitpos
= 0;
4708 rtx to_rtx
= target
;
4710 /* Just ignore missing fields.
4711 We cleared the whole structure, above,
4712 if any fields are missing. */
4716 if (cleared
&& initializer_zerop (value
))
4719 if (host_integerp (DECL_SIZE (field
), 1))
4720 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4724 mode
= DECL_MODE (field
);
4725 if (DECL_BIT_FIELD (field
))
4728 offset
= DECL_FIELD_OFFSET (field
);
4729 if (host_integerp (offset
, 0)
4730 && host_integerp (bit_position (field
), 0))
4732 bitpos
= int_bit_position (field
);
4736 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4743 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
4744 make_tree (TREE_TYPE (exp
),
4747 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4748 if (GET_CODE (to_rtx
) != MEM
)
4751 #ifdef POINTERS_EXTEND_UNSIGNED
4752 if (GET_MODE (offset_rtx
) != Pmode
)
4753 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
4755 if (GET_MODE (offset_rtx
) != ptr_mode
)
4756 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4759 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4760 highest_pow2_factor (offset
));
4763 if (TREE_READONLY (field
))
4765 if (GET_CODE (to_rtx
) == MEM
)
4766 to_rtx
= copy_rtx (to_rtx
);
4768 RTX_UNCHANGING_P (to_rtx
) = 1;
4771 #ifdef WORD_REGISTER_OPERATIONS
4772 /* If this initializes a field that is smaller than a word, at the
4773 start of a word, try to widen it to a full word.
4774 This special case allows us to output C++ member function
4775 initializations in a form that the optimizers can understand. */
4776 if (GET_CODE (target
) == REG
4777 && bitsize
< BITS_PER_WORD
4778 && bitpos
% BITS_PER_WORD
== 0
4779 && GET_MODE_CLASS (mode
) == MODE_INT
4780 && TREE_CODE (value
) == INTEGER_CST
4782 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4784 tree type
= TREE_TYPE (value
);
4786 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4788 type
= lang_hooks
.types
.type_for_size
4789 (BITS_PER_WORD
, TYPE_UNSIGNED (type
));
4790 value
= convert (type
, value
);
4793 if (BYTES_BIG_ENDIAN
)
4795 = fold (build (LSHIFT_EXPR
, type
, value
,
4796 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4797 bitsize
= BITS_PER_WORD
;
4802 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4803 && DECL_NONADDRESSABLE_P (field
))
4805 to_rtx
= copy_rtx (to_rtx
);
4806 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4809 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4810 value
, type
, cleared
,
4811 get_alias_set (TREE_TYPE (field
)));
4814 else if (TREE_CODE (type
) == ARRAY_TYPE
4815 || TREE_CODE (type
) == VECTOR_TYPE
)
4821 tree elttype
= TREE_TYPE (type
);
4823 HOST_WIDE_INT minelt
= 0;
4824 HOST_WIDE_INT maxelt
= 0;
4828 unsigned n_elts
= 0;
4830 if (TREE_CODE (type
) == ARRAY_TYPE
)
4831 domain
= TYPE_DOMAIN (type
);
4833 /* Vectors do not have domains; look up the domain of
4834 the array embedded in the debug representation type.
4835 FIXME Would probably be more efficient to treat vectors
4836 separately from arrays. */
4838 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
4839 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4840 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
4842 enum machine_mode mode
= GET_MODE (target
);
4844 icode
= (int) vec_init_optab
->handlers
[mode
].insn_code
;
4845 if (icode
!= CODE_FOR_nothing
)
4849 elt_size
= GET_MODE_SIZE (GET_MODE_INNER (mode
));
4850 n_elts
= (GET_MODE_SIZE (mode
) / elt_size
);
4851 vector
= alloca (n_elts
);
4852 for (i
= 0; i
< n_elts
; i
++)
4853 vector
[i
] = CONST0_RTX (GET_MODE_INNER (mode
));
4858 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4859 && TYPE_MAX_VALUE (domain
)
4860 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4861 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4863 /* If we have constant bounds for the range of the type, get them. */
4866 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4867 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4870 /* If the constructor has fewer elements than the array,
4871 clear the whole array first. Similarly if this is
4872 static constructor of a non-BLKmode object. */
4873 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4877 HOST_WIDE_INT count
= 0, zero_count
= 0;
4878 need_to_clear
= ! const_bounds_p
;
4880 /* This loop is a more accurate version of the loop in
4881 mostly_zeros_p (it handles RANGE_EXPR in an index).
4882 It is also needed to check for missing elements. */
4883 for (elt
= CONSTRUCTOR_ELTS (exp
);
4884 elt
!= NULL_TREE
&& ! need_to_clear
;
4885 elt
= TREE_CHAIN (elt
))
4887 tree index
= TREE_PURPOSE (elt
);
4888 HOST_WIDE_INT this_node_count
;
4890 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4892 tree lo_index
= TREE_OPERAND (index
, 0);
4893 tree hi_index
= TREE_OPERAND (index
, 1);
4895 if (! host_integerp (lo_index
, 1)
4896 || ! host_integerp (hi_index
, 1))
4902 this_node_count
= (tree_low_cst (hi_index
, 1)
4903 - tree_low_cst (lo_index
, 1) + 1);
4906 this_node_count
= 1;
4908 count
+= this_node_count
;
4909 if (mostly_zeros_p (TREE_VALUE (elt
)))
4910 zero_count
+= this_node_count
;
4913 /* Clear the entire array first if there are any missing elements,
4914 or if the incidence of zero elements is >= 75%. */
4916 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4920 if (need_to_clear
&& size
> 0 && !vector
)
4925 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4927 clear_storage (target
, GEN_INT (size
));
4931 else if (REG_P (target
))
4932 /* Inform later passes that the old value is dead. */
4933 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4935 /* Store each element of the constructor into
4936 the corresponding element of TARGET, determined
4937 by counting the elements. */
4938 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4940 elt
= TREE_CHAIN (elt
), i
++)
4942 enum machine_mode mode
;
4943 HOST_WIDE_INT bitsize
;
4944 HOST_WIDE_INT bitpos
;
4946 tree value
= TREE_VALUE (elt
);
4947 tree index
= TREE_PURPOSE (elt
);
4948 rtx xtarget
= target
;
4950 if (cleared
&& initializer_zerop (value
))
4953 unsignedp
= TYPE_UNSIGNED (elttype
);
4954 mode
= TYPE_MODE (elttype
);
4955 if (mode
== BLKmode
)
4956 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4957 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4960 bitsize
= GET_MODE_BITSIZE (mode
);
4962 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4964 tree lo_index
= TREE_OPERAND (index
, 0);
4965 tree hi_index
= TREE_OPERAND (index
, 1);
4966 rtx index_r
, pos_rtx
;
4967 HOST_WIDE_INT lo
, hi
, count
;
4973 /* If the range is constant and "small", unroll the loop. */
4975 && host_integerp (lo_index
, 0)
4976 && host_integerp (hi_index
, 0)
4977 && (lo
= tree_low_cst (lo_index
, 0),
4978 hi
= tree_low_cst (hi_index
, 0),
4979 count
= hi
- lo
+ 1,
4980 (GET_CODE (target
) != MEM
4982 || (host_integerp (TYPE_SIZE (elttype
), 1)
4983 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4986 lo
-= minelt
; hi
-= minelt
;
4987 for (; lo
<= hi
; lo
++)
4989 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4991 if (GET_CODE (target
) == MEM
4992 && !MEM_KEEP_ALIAS_SET_P (target
)
4993 && TREE_CODE (type
) == ARRAY_TYPE
4994 && TYPE_NONALIASED_COMPONENT (type
))
4996 target
= copy_rtx (target
);
4997 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5000 store_constructor_field
5001 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
5002 get_alias_set (elttype
));
5007 rtx loop_start
= gen_label_rtx ();
5008 rtx loop_end
= gen_label_rtx ();
5011 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
5012 unsignedp
= TYPE_UNSIGNED (domain
);
5014 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
5017 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
5019 SET_DECL_RTL (index
, index_r
);
5020 if (TREE_CODE (value
) == SAVE_EXPR
5021 && SAVE_EXPR_RTL (value
) == 0)
5023 /* Make sure value gets expanded once before the
5025 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
5028 store_expr (lo_index
, index_r
, 0);
5030 /* Build the head of the loop. */
5031 do_pending_stack_adjust ();
5033 emit_label (loop_start
);
5035 /* Assign value to element index. */
5037 = convert (ssizetype
,
5038 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5039 index
, TYPE_MIN_VALUE (domain
))));
5040 position
= size_binop (MULT_EXPR
, position
,
5042 TYPE_SIZE_UNIT (elttype
)));
5044 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
5045 xtarget
= offset_address (target
, pos_rtx
,
5046 highest_pow2_factor (position
));
5047 xtarget
= adjust_address (xtarget
, mode
, 0);
5048 if (TREE_CODE (value
) == CONSTRUCTOR
)
5049 store_constructor (value
, xtarget
, cleared
,
5050 bitsize
/ BITS_PER_UNIT
);
5052 store_expr (value
, xtarget
, 0);
5054 /* Generate a conditional jump to exit the loop. */
5055 exit_cond
= build (LT_EXPR
, integer_type_node
,
5057 jumpif (exit_cond
, loop_end
);
5059 /* Update the loop counter, and jump to the head of
5061 expand_increment (build (PREINCREMENT_EXPR
,
5063 index
, integer_one_node
), 0, 0);
5064 emit_jump (loop_start
);
5066 /* Build the end of the loop. */
5067 emit_label (loop_end
);
5070 else if ((index
!= 0 && ! host_integerp (index
, 0))
5071 || ! host_integerp (TYPE_SIZE (elttype
), 1))
5079 index
= ssize_int (1);
5082 index
= convert (ssizetype
,
5083 fold (build (MINUS_EXPR
, index
,
5084 TYPE_MIN_VALUE (domain
))));
5086 position
= size_binop (MULT_EXPR
, index
,
5088 TYPE_SIZE_UNIT (elttype
)));
5089 xtarget
= offset_address (target
,
5090 expand_expr (position
, 0, VOIDmode
, 0),
5091 highest_pow2_factor (position
));
5092 xtarget
= adjust_address (xtarget
, mode
, 0);
5093 store_expr (value
, xtarget
, 0);
5100 pos
= tree_low_cst (index
, 0) - minelt
;
5103 vector
[pos
] = expand_expr (value
, NULL_RTX
, VOIDmode
, 0);
5108 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
5109 * tree_low_cst (TYPE_SIZE (elttype
), 1));
5111 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
5113 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
5114 && TREE_CODE (type
) == ARRAY_TYPE
5115 && TYPE_NONALIASED_COMPONENT (type
))
5117 target
= copy_rtx (target
);
5118 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5120 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
5121 type
, cleared
, get_alias_set (elttype
));
5126 emit_insn (GEN_FCN (icode
) (target
,
5127 gen_rtx_PARALLEL (GET_MODE (target
),
5128 gen_rtvec_v (n_elts
, vector
))));
5132 /* Set constructor assignments. */
5133 else if (TREE_CODE (type
) == SET_TYPE
)
5135 tree elt
= CONSTRUCTOR_ELTS (exp
);
5136 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
5137 tree domain
= TYPE_DOMAIN (type
);
5138 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the set (using bzero/memset), and then
         set the bits we want.  */
5150 /* Check for all zeros. */
5151 if (elt
== NULL_TREE
&& size
> 0)
5154 clear_storage (target
, GEN_INT (size
));
5158 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
5159 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
5160 bitlength
= size_binop (PLUS_EXPR
,
5161 size_diffop (domain_max
, domain_min
),
5164 nbits
= tree_low_cst (bitlength
, 1);
5166 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5167 are "complicated" (more than one range), initialize (the
5168 constant parts) by copying from a constant. */
5169 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5170 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5172 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5173 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5174 char *bit_buffer
= alloca (nbits
);
5175 HOST_WIDE_INT word
= 0;
5176 unsigned int bit_pos
= 0;
5177 unsigned int ibit
= 0;
5178 unsigned int offset
= 0; /* In bytes from beginning of set. */
5180 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5183 if (bit_buffer
[ibit
])
5185 if (BYTES_BIG_ENDIAN
)
5186 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5188 word
|= 1 << bit_pos
;
5192 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5194 if (word
!= 0 || ! cleared
)
5196 rtx datum
= gen_int_mode (word
, mode
);
5199 /* The assumption here is that it is safe to use
5200 XEXP if the set is multi-word, but not if
5201 it's single-word. */
5202 if (GET_CODE (target
) == MEM
)
5203 to_rtx
= adjust_address (target
, mode
, offset
);
5204 else if (offset
== 0)
5208 emit_move_insn (to_rtx
, datum
);
5215 offset
+= set_word_size
/ BITS_PER_UNIT
;
5220 /* Don't bother clearing storage if the set is all ones. */
5221 if (TREE_CHAIN (elt
) != NULL_TREE
5222 || (TREE_PURPOSE (elt
) == NULL_TREE
5224 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5225 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5226 || (tree_low_cst (TREE_VALUE (elt
), 0)
5227 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5228 != (HOST_WIDE_INT
) nbits
))))
5229 clear_storage (target
, expr_size (exp
));
5231 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5233 /* Start of range of element or NULL. */
5234 tree startbit
= TREE_PURPOSE (elt
);
5235 /* End of range of element, or element value. */
5236 tree endbit
= TREE_VALUE (elt
);
5237 HOST_WIDE_INT startb
, endb
;
5238 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5240 bitlength_rtx
= expand_expr (bitlength
,
5241 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5243 /* Handle non-range tuple element like [ expr ]. */
5244 if (startbit
== NULL_TREE
)
5246 startbit
= save_expr (endbit
);
5250 startbit
= convert (sizetype
, startbit
);
5251 endbit
= convert (sizetype
, endbit
);
5252 if (! integer_zerop (domain_min
))
5254 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5255 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5257 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5258 EXPAND_CONST_ADDRESS
);
5259 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5260 EXPAND_CONST_ADDRESS
);
5266 ((build_qualified_type (lang_hooks
.types
.type_for_mode
5267 (GET_MODE (target
), 0),
5270 emit_move_insn (targetx
, target
);
5273 else if (GET_CODE (target
) == MEM
)
5278 /* Optimization: If startbit and endbit are constants divisible
5279 by BITS_PER_UNIT, call memset instead. */
5280 if (TARGET_MEM_FUNCTIONS
5281 && TREE_CODE (startbit
) == INTEGER_CST
5282 && TREE_CODE (endbit
) == INTEGER_CST
5283 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5284 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5286 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5288 plus_constant (XEXP (targetx
, 0),
5289 startb
/ BITS_PER_UNIT
),
5291 constm1_rtx
, TYPE_MODE (integer_type_node
),
5292 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5293 TYPE_MODE (sizetype
));
5296 emit_library_call (setbits_libfunc
, LCT_NORMAL
,
5297 VOIDmode
, 4, XEXP (targetx
, 0),
5298 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5299 startbit_rtx
, TYPE_MODE (sizetype
),
5300 endbit_rtx
, TYPE_MODE (sizetype
));
5303 emit_move_insn (target
, targetx
);
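/* Illustrative sketch (not part of GCC): the SET_TYPE case above packs a
   buffer of 0/1 flags into set-sized words, numbering bits from the most
   significant end on big-endian targets and from the least significant end
   otherwise, before storing each word.  The standalone helper below shows
   that packing for a single 32-bit word; the names pack_word and big_endian
   are hypothetical.  Kept inside #if 0 so it is not compiled as part of
   this file.  */
#if 0
static unsigned int
pack_word (const char *bit_buffer, unsigned int nbits, int big_endian)
{
  unsigned int word = 0, bit_pos;

  for (bit_pos = 0; bit_pos < nbits && bit_pos < 32; bit_pos++)
    if (bit_buffer[bit_pos])
      {
        if (big_endian)
          word |= 1u << (32 - 1 - bit_pos);   /* Highest bit first.  */
        else
          word |= 1u << bit_pos;              /* Lowest bit first.  */
      }

  return word;
}
#endif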
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
5330 store_field (rtx target
, HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
5331 enum machine_mode mode
, tree exp
, enum machine_mode value_mode
,
5332 int unsignedp
, tree type
, int alias_set
)
5334 HOST_WIDE_INT width_mask
= 0;
5336 if (TREE_CODE (exp
) == ERROR_MARK
)
5339 /* If we have nothing to store, do nothing unless the expression has
5342 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5343 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5344 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5346 /* If we are storing into an unaligned field of an aligned union that is
5347 in a register, we may have the mode of TARGET being an integer mode but
5348 MODE == BLKmode. In that case, get an aligned object whose size and
5349 alignment are the same as TARGET and store TARGET into it (we can avoid
5350 the store if the field being stored is the entire width of TARGET). Then
5351 call ourselves recursively to store the field into a BLKmode version of
5352 that object. Finally, load from the object into TARGET. This is not
5353 very efficient in general, but should only be slightly more expensive
5354 than the otherwise-required unaligned accesses. Perhaps this can be
5355 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5356 twice, once with emit_move_insn and once via store_field. */
5359 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5361 rtx object
= assign_temp (type
, 0, 1, 1);
5362 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5364 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5365 emit_move_insn (object
, target
);
5367 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5370 emit_move_insn (target
, object
);
5372 /* We want to return the BLKmode version of the data. */
5376 if (GET_CODE (target
) == CONCAT
)
5378 /* We're storing into a struct containing a single __complex. */
5382 return store_expr (exp
, target
, value_mode
!= VOIDmode
);
5385 /* If the structure is in a register or if the component
5386 is a bit field, we cannot use addressing to access it.
5387 Use bit-field techniques or SUBREG to store in it. */
5389 if (mode
== VOIDmode
5390 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5391 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5392 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5393 || GET_CODE (target
) == REG
5394 || GET_CODE (target
) == SUBREG
5395 /* If the field isn't aligned enough to store as an ordinary memref,
5396 store it as a bit field. */
5398 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5399 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5400 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5401 || (bitpos
% BITS_PER_UNIT
!= 0)))
5402 /* If the RHS and field are a constant size and the size of the
5403 RHS isn't the same size as the bitfield, we must use bitfield
5406 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5407 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5409 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         a big-endian machine, we want the upper BITSIZE bits.  */
5415 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5416 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5417 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5418 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5419 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5423 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5425 if (mode
!= VOIDmode
&& mode
!= BLKmode
5426 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5427 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5429 /* If the modes of TARGET and TEMP are both BLKmode, both
5430 must be in memory and BITPOS must be aligned on a byte
5431 boundary. If so, we simply do a block copy. */
5432 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5434 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5435 || bitpos
% BITS_PER_UNIT
!= 0)
5438 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5439 emit_block_move (target
, temp
,
5440 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5444 return value_mode
== VOIDmode
? const0_rtx
: target
;
5447 /* Store the value in the bitfield. */
5448 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5449 int_size_in_bytes (type
));
5451 if (value_mode
!= VOIDmode
)
5453 /* The caller wants an rtx for the value.
5454 If possible, avoid refetching from the bitfield itself. */
5456 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5459 enum machine_mode tmode
;
5461 tmode
= GET_MODE (temp
);
5462 if (tmode
== VOIDmode
)
5466 return expand_and (tmode
, temp
,
5467 gen_int_mode (width_mask
, tmode
),
5470 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5471 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5472 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5475 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5476 NULL_RTX
, value_mode
, VOIDmode
,
5477 int_size_in_bytes (type
));
5483 rtx addr
= XEXP (target
, 0);
5484 rtx to_rtx
= target
;
5486 /* If a value is wanted, it must be the lhs;
5487 so make the address stable for multiple use. */
5489 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5490 && ! CONSTANT_ADDRESS_P (addr
)
5491 /* A frame-pointer reference is already stable. */
5492 && ! (GET_CODE (addr
) == PLUS
5493 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5494 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5495 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5496 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5498 /* Now build a reference to just the desired component. */
5500 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5502 if (to_rtx
== target
)
5503 to_rtx
= copy_rtx (to_rtx
);
5505 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5506 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5507 set_mem_alias_set (to_rtx
, alias_set
);
5509 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
5536 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5537 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5538 enum machine_mode
*pmode
, int *punsignedp
,
5542 enum machine_mode mode
= VOIDmode
;
5543 tree offset
= size_zero_node
;
5544 tree bit_offset
= bitsize_zero_node
;
5547 /* First get the mode, signedness, and size. We do this from just the
5548 outermost expression. */
5549 if (TREE_CODE (exp
) == COMPONENT_REF
)
5551 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5552 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5553 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5555 *punsignedp
= DECL_UNSIGNED (TREE_OPERAND (exp
, 1));
5557 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5559 size_tree
= TREE_OPERAND (exp
, 1);
5560 *punsignedp
= BIT_FIELD_REF_UNSIGNED (exp
);
5564 mode
= TYPE_MODE (TREE_TYPE (exp
));
5565 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
5567 if (mode
== BLKmode
)
5568 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5570 *pbitsize
= GET_MODE_BITSIZE (mode
);
5575 if (! host_integerp (size_tree
, 1))
5576 mode
= BLKmode
, *pbitsize
= -1;
5578 *pbitsize
= tree_low_cst (size_tree
, 1);
5581 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5582 and find the ultimate containing object. */
5585 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5586 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5587 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5589 tree field
= TREE_OPERAND (exp
, 1);
5590 tree this_offset
= DECL_FIELD_OFFSET (field
);
5592 /* If this field hasn't been filled in yet, don't go
5593 past it. This should only happen when folding expressions
5594 made during type construction. */
5595 if (this_offset
== 0)
5598 this_offset
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset
, exp
);
5600 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5601 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5602 DECL_FIELD_BIT_OFFSET (field
));
5604 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5607 else if (TREE_CODE (exp
) == ARRAY_REF
5608 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5610 tree index
= TREE_OPERAND (exp
, 1);
5611 tree array
= TREE_OPERAND (exp
, 0);
5612 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5613 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5614 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5616 /* We assume all arrays have sizes that are a multiple of a byte.
5617 First subtract the lower bound, if any, in the type of the
5618 index, then convert to sizetype and multiply by the size of the
5620 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5621 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5624 /* If the index has a self-referential type, instantiate it with
5625 the object; likewise for the component size. */
5626 index
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (index
, exp
);
5627 unit_size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size
, array
);
5628 offset
= size_binop (PLUS_EXPR
, offset
,
5629 size_binop (MULT_EXPR
,
5630 convert (sizetype
, index
),
5634 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5635 conversions that don't change the mode, and all view conversions
5636 except those that need to "step up" the alignment. */
5637 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5638 && ! (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
5639 && ! ((TYPE_ALIGN (TREE_TYPE (exp
))
5640 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5642 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5643 < BIGGEST_ALIGNMENT
)
5644 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
5645 || TYPE_ALIGN_OK (TREE_TYPE
5646 (TREE_OPERAND (exp
, 0))))))
5647 && ! ((TREE_CODE (exp
) == NOP_EXPR
5648 || TREE_CODE (exp
) == CONVERT_EXPR
)
5649 && (TYPE_MODE (TREE_TYPE (exp
))
5650 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5653 /* If any reference in the chain is volatile, the effect is volatile. */
5654 if (TREE_THIS_VOLATILE (exp
))
5657 exp
= TREE_OPERAND (exp
, 0);
5660 /* If OFFSET is constant, see if we can return the whole thing as a
5661 constant bit position. Otherwise, split it up. */
5662 if (host_integerp (offset
, 0)
5663 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5665 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5666 && host_integerp (tem
, 0))
5667 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5669 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
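/* Illustrative sketch (not part of GCC): for a constant-position reference,
   get_inner_reference above folds the byte offset and the bit offset into a
   single bit position (*PBITPOS) and leaves *POFFSET zero.  The fragment
   below shows the same arithmetic for an ordinary C struct member using
   offsetof; struct s, its member f, and BITS_PER_UNIT_EXAMPLE are
   hypothetical names for this example only.  Kept inside #if 0 so it is
   not compiled as part of this file.  */
#if 0
#include <stddef.h>

#define BITS_PER_UNIT_EXAMPLE 8

struct s { int a; short f; };

/* Constant bit position of s.f from the start of the object: the byte
   offset scaled to bits, plus the bit offset within the byte (zero here
   because f is not a bit-field).  */
static const size_t f_bitpos
  = offsetof (struct s, f) * BITS_PER_UNIT_EXAMPLE + 0;
#endif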
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

      /* ??? Sure they are handled, but get_inner_reference may return
         a different PBITSIZE, depending upon whether the expression is
         wrapped up in a NOP_EXPR or not, e.g. for bitfields.  */
    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
5711 force_operand (rtx value
, rtx target
)
5714 /* Use subtarget as the target for operand 0 of a binary operation. */
5715 rtx subtarget
= get_subtarget (target
);
5716 enum rtx_code code
= GET_CODE (value
);
5718 /* Check for subreg applied to an expression produced by loop optimizer. */
5720 && GET_CODE (SUBREG_REG (value
)) != REG
5721 && GET_CODE (SUBREG_REG (value
)) != MEM
)
5723 value
= simplify_gen_subreg (GET_MODE (value
),
5724 force_reg (GET_MODE (SUBREG_REG (value
)),
5725 force_operand (SUBREG_REG (value
),
5727 GET_MODE (SUBREG_REG (value
)),
5728 SUBREG_BYTE (value
));
5729 code
= GET_CODE (value
);
5732 /* Check for a PIC address load. */
5733 if ((code
== PLUS
|| code
== MINUS
)
5734 && XEXP (value
, 0) == pic_offset_table_rtx
5735 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5736 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5737 || GET_CODE (XEXP (value
, 1)) == CONST
))
5740 subtarget
= gen_reg_rtx (GET_MODE (value
));
5741 emit_move_insn (subtarget
, value
);
5745 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5748 target
= gen_reg_rtx (GET_MODE (value
));
5749 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5750 code
== ZERO_EXTEND
);
5754 if (ARITHMETIC_P (value
))
5756 op2
= XEXP (value
, 1);
5757 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5759 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5762 op2
= negate_rtx (GET_MODE (value
), op2
);
5765 /* Check for an addition with OP2 a constant integer and our first
5766 operand a PLUS of a virtual register and something else. In that
5767 case, we want to emit the sum of the virtual register and the
5768 constant first and then add the other value. This allows virtual
5769 register instantiation to simply modify the constant rather than
5770 creating another one around this addition. */
5771 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5772 && GET_CODE (XEXP (value
, 0)) == PLUS
5773 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5774 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5775 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5777 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5778 XEXP (XEXP (value
, 0), 0), op2
,
5779 subtarget
, 0, OPTAB_LIB_WIDEN
);
5780 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5781 force_operand (XEXP (XEXP (value
,
5783 target
, 0, OPTAB_LIB_WIDEN
);
5786 op1
= force_operand (XEXP (value
, 0), subtarget
);
5787 op2
= force_operand (op2
, NULL_RTX
);
5791 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5793 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5794 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5795 target
, 1, OPTAB_LIB_WIDEN
);
5797 return expand_divmod (0,
5798 FLOAT_MODE_P (GET_MODE (value
))
5799 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5800 GET_MODE (value
), op1
, op2
, target
, 0);
5803 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5807 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5811 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5815 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5816 target
, 0, OPTAB_LIB_WIDEN
);
5819 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5820 target
, 1, OPTAB_LIB_WIDEN
);
5823 if (UNARY_P (value
))
5825 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5826 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references
     to be explicit, so we need to deal with such paradoxical SUBREGs.  */
5832 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5833 && (GET_MODE_SIZE (GET_MODE (value
))
5834 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5836 = simplify_gen_subreg (GET_MODE (value
),
5837 force_reg (GET_MODE (SUBREG_REG (value
)),
5838 force_operand (SUBREG_REG (value
),
5840 GET_MODE (SUBREG_REG (value
)),
5841 SUBREG_BYTE (value
));
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
5856 safe_from_p (rtx x
, tree exp
, int top_p
)
5860 static tree save_expr_list
;
5863 /* If EXP has varying size, we MUST use a target since we currently
5864 have no way of allocating temporaries of variable size
5865 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5866 So we assume here that something at a higher level has prevented a
5867 clash. This is somewhat bogus, but the best we can do. Only
5868 do this when X is BLKmode and when we are at the top level. */
5869 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5870 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5871 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5872 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5873 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5875 && GET_MODE (x
) == BLKmode
)
5876 /* If X is in the outgoing argument area, it is always safe. */
5877 || (GET_CODE (x
) == MEM
5878 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5879 || (GET_CODE (XEXP (x
, 0)) == PLUS
5880 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5883 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5884 find the underlying pseudo. */
5885 if (GET_CODE (x
) == SUBREG
)
5888 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5892 /* A SAVE_EXPR might appear many times in the expression passed to the
5893 top-level safe_from_p call, and if it has a complex subexpression,
5894 examining it multiple times could result in a combinatorial explosion.
5895 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5896 with optimization took about 28 minutes to compile -- even though it was
5897 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5898 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5899 we have processed. Note that the only test of top_p was above. */
5908 rtn
= safe_from_p (x
, exp
, 0);
5910 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5911 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5916 /* Now look at our tree code and possibly recurse. */
5917 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5920 exp_rtl
= DECL_RTL_IF_SET (exp
);
5927 if (TREE_CODE (exp
) == TREE_LIST
)
5931 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5933 exp
= TREE_CHAIN (exp
);
5936 if (TREE_CODE (exp
) != TREE_LIST
)
5937 return safe_from_p (x
, exp
, 0);
5940 else if (TREE_CODE (exp
) == ERROR_MARK
)
5941 return 1; /* An already-visited SAVE_EXPR? */
5947 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
5952 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5956 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5957 the expression. If it is set, we conflict iff we are that rtx or
5958 both are in memory. Otherwise, we check all operands of the
5959 expression recursively. */
5961 switch (TREE_CODE (exp
))
5964 /* If the operand is static or we are static, we can't conflict.
5965 Likewise if we don't conflict with the operand at all. */
5966 if (staticp (TREE_OPERAND (exp
, 0))
5967 || TREE_STATIC (exp
)
5968 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
      /* Otherwise, the only way this can conflict is if we are taking
         the address of a DECL whose address is part of X, which is
         very rare.  */
      exp = TREE_OPERAND (exp, 0);
5977       if (!DECL_RTL_SET_P (exp)
5978           || GET_CODE (DECL_RTL (exp)) != MEM)
5981       exp_rtl = XEXP (DECL_RTL (exp), 0);
5986       if (GET_CODE (x) == MEM
5987           && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5988                                     get_alias_set (exp)))
5993       /* Assume that the call will clobber all hard registers and
5995       if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5996           || GET_CODE (x) == MEM)
6001       /* If a sequence exists, we would have to scan every instruction
6002          in the sequence to see if it was safe.  This is probably not
6004       if (RTL_EXPR_SEQUENCE (exp))
6007       exp_rtl = RTL_EXPR_RTL (exp);
6010     case WITH_CLEANUP_EXPR:
6011       exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6014     case CLEANUP_POINT_EXPR:
6015       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6018       exp_rtl = SAVE_EXPR_RTL (exp);
6022       /* If we've already scanned this, don't do it again.  Otherwise,
6023          show we've scanned it and record for clearing the flag if we're
6025       if (TREE_PRIVATE (exp))
6028       TREE_PRIVATE (exp) = 1;
6029       if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6031           TREE_PRIVATE (exp) = 0;
6035       save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6039       /* The only operand we look at is operand 1.  The rest aren't
6040          part of the expression.  */
6041       return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6047   /* If we have an rtx, we do not need to scan our operands.  */
6051   nops = first_rtl_op (TREE_CODE (exp));
6052   for (i = 0; i < nops; i++)
6053     if (TREE_OPERAND (exp, i) != 0
6054         && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6057   /* If this is a language-specific tree code, it may require
6058      special handling.  */
6059   if ((unsigned int) TREE_CODE (exp)
6060       >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6061       && !lang_hooks.safe_from_p (x, exp))
6065   /* If we have an rtl, find any enclosed object.  Then see if we conflict
6069   if (GET_CODE (exp_rtl) == SUBREG)
6071       exp_rtl = SUBREG_REG (exp_rtl);
6072       if (GET_CODE (exp_rtl) == REG
6073           && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6077   /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6078      are memory and they conflict.  */
6079   return ! (rtx_equal_p (x, exp_rtl)
6080             || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6081                 && true_dependence (exp_rtl, VOIDmode, x,
6082                                     rtx_addr_varies_p)));
6085   /* If we reach here, it is safe.  */
6089 /* Subroutine of expand_expr: return rtx if EXP is a
6090    variable or parameter; else return 0.  */
6096   switch (TREE_CODE (exp))
6100       return DECL_RTL (exp);
6106 /* Return the highest power of two that EXP is known to be a multiple of.
6107    This is used in updating alignment of MEMs in array references.  */
6109 static unsigned HOST_WIDE_INT
6110 highest_pow2_factor (tree exp)
6112   unsigned HOST_WIDE_INT c0, c1;
6114   switch (TREE_CODE (exp))
6117       /* We can find the lowest bit that's a one.  If the low
6118          HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6119          We need to handle this case since we can find it in a COND_EXPR,
6120          a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6121          erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6123       if (TREE_CONSTANT_OVERFLOW (exp))
6124         return BIGGEST_ALIGNMENT;
6127       /* Note: tree_low_cst is intentionally not used here,
6128          we don't care about the upper bits.  */
6129       c0 = TREE_INT_CST_LOW (exp);
6131       return c0 ? c0 : BIGGEST_ALIGNMENT;
6135     case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
6136       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6137       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6138       return MIN (c0, c1);
6141       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6142       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6145     case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6147       if (integer_pow2p (TREE_OPERAND (exp, 1))
6148           && host_integerp (TREE_OPERAND (exp, 1), 1))
6150           c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6151           c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6152           return MAX (1, c0 / c1);
6156     case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
6158       return highest_pow2_factor (TREE_OPERAND (exp, 0));
6161       return highest_pow2_factor (TREE_OPERAND (exp, 1));
6164       c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6165       c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6166       return MIN (c0, c1);
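/* Illustrative sketch only (not part of GCC): the same rules applied to plain
   integers.  Per the comment above, the INTEGER_CST case boils down to the
   lowest set bit of the constant, and PLUS_EXPR/MINUS_EXPR keep the smaller
   of the two operand factors, so for an index such as "i*8 + 48" the deduced
   factor is 8.  */
#if 0
static unsigned long long
pow2_factor_of_sum_example (unsigned long long a, unsigned long long b)
{
  unsigned long long c0 = a & -a;   /* lowest set bit, as for an INTEGER_CST   */
  unsigned long long c1 = b & -b;
  return c0 < c1 ? c0 : c1;         /* a sum is only a multiple of the smaller */
}
#endif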
6175 /* Similar, except that the alignment requirements of TARGET are
6176    taken into account.  Assume it is at least as aligned as its
6177    type, unless it is a COMPONENT_REF in which case the layout of
6178    the structure gives the alignment.  */
6180 static unsigned HOST_WIDE_INT
6181 highest_pow2_factor_for_target (tree target, tree exp)
6183   unsigned HOST_WIDE_INT target_align, factor;
6185   factor = highest_pow2_factor (exp);
6186   if (TREE_CODE (target) == COMPONENT_REF)
6187     target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6189     target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6190   return MAX (factor, target_align);
6193 /* Expands variable VAR.  */
6196 expand_var (tree var)
6198   if (DECL_EXTERNAL (var))
6201   if (TREE_STATIC (var))
6202     /* If this is an inlined copy of a static local variable,
6203        look up the original decl.  */
6204     var = DECL_ORIGIN (var);
6206   if (TREE_STATIC (var)
6207       ? !TREE_ASM_WRITTEN (var)
6208       : !DECL_RTL_SET_P (var))
6210       if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6212           /* Prepare a mem & address for the decl.  */
6215           if (TREE_STATIC (var))
6218           x = gen_rtx_MEM (DECL_MODE (var),
6219                            gen_reg_rtx (Pmode));
6221           set_mem_attributes (x, var, 1);
6222           SET_DECL_RTL (var, x);
6224       else if (lang_hooks.expand_decl (var))
6226       else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6228       else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6229         rest_of_decl_compilation (var, NULL, 0, 0);
6230       else if (TREE_CODE (var) == TYPE_DECL
6231                || TREE_CODE (var) == CONST_DECL
6232                || TREE_CODE (var) == FUNCTION_DECL
6233                || TREE_CODE (var) == LABEL_DECL)
6234         /* No expansion needed.  */;
6240 /* Expands declarations of variables in list VARS.  */
6243 expand_vars (tree vars)
6245   for (; vars; vars = TREE_CHAIN (vars))
6249       if (DECL_EXTERNAL (var))
6253       expand_decl_init (var);
6257 /* Subroutine of expand_expr.  Expand the two operands of a binary
6258    expression EXP0 and EXP1 placing the results in OP0 and OP1.
6259    The value may be stored in TARGET if TARGET is nonzero.  The
6260    MODIFIER argument is as documented by expand_expr.  */
6263 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6264                  enum expand_modifier modifier)
6266   if (! safe_from_p (target, exp1, 1))
6268   if (operand_equal_p (exp0, exp1, 0))
6270       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6271       *op1 = copy_rtx (*op0);
6275       /* If we need to preserve evaluation order, copy exp0 into its own
6276          temporary variable so that it can't be clobbered by exp1.  */
6277       if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6278         exp0 = save_expr (exp0);
6279       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6280       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
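/* Illustrative sketch only (not part of GCC): the evaluation-order hazard
   that the save_expr call above guards against, written as plain C.  If
   operand 0 were not copied first, the side effect in operand 1 could
   clobber the value operand 0 was supposed to yield.  */
#if 0
static int
ordered_operands_example (int *p)
{
  int op0 = *p;          /* like save_expr (exp0): snapshot operand 0 first */
  int op1 = ++*p;        /* operand 1 has a side effect on what op0 read    */
  return op0 + op1;      /* combine in the original left-to-right order     */
}
#endif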
6285 /* expand_expr: generate code for computing expression EXP.
6286    An rtx for the computed value is returned.  The value is never null.
6287    In the case of a void EXP, const0_rtx is returned.
6289    The value may be stored in TARGET if TARGET is nonzero.
6290    TARGET is just a suggestion; callers must assume that
6291    the rtx returned may not be the same as TARGET.
6293    If TARGET is CONST0_RTX, it means that the value will be ignored.
6295    If TMODE is not VOIDmode, it suggests generating the
6296    result in mode TMODE.  But this is done only when convenient.
6297    Otherwise, TMODE is ignored and the value is generated in its natural mode.
6298    TMODE is just a suggestion; callers must assume that
6299    the rtx returned may not have mode TMODE.
6301    Note that TARGET may have neither TMODE nor MODE.  In that case, it
6302    probably will not be used.
6304    If MODIFIER is EXPAND_SUM then when EXP is an addition
6305    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6306    or a nest of (PLUS ...) and (MINUS ...) where the terms are
6307    products as above, or REG or MEM, or constant.
6308    Ordinarily in such cases we would output mul or add instructions
6309    and then return a pseudo reg containing the sum.
6311    EXPAND_INITIALIZER is much like EXPAND_SUM except that
6312    it also marks a label as absolutely required (it can't be dead).
6313    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6314    This is used for outputting expressions used in initializers.
6316    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6317    with a constant address even if that address is not normally legitimate.
6318    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6320    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6321    a call parameter.  Such targets require special care as we haven't yet
6322    marked TARGET so that it's safe from being trashed by libcalls.  We
6323    don't want to use TARGET for anything but the final result;
6324    intermediate values must go elsewhere.  Additionally, calls to
6325    emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6327    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6328    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6329    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
6330    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6333 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6334                                enum expand_modifier, rtx *);
6337 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6338                   enum expand_modifier modifier, rtx *alt_rtl)
6341   rtx ret, last = NULL;
6343   /* Handle ERROR_MARK before anybody tries to access its type.  */
6344   if (TREE_CODE (exp) == ERROR_MARK
6345       || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6347       ret = CONST0_RTX (tmode);
6348       return ret ? ret : const0_rtx;
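/* Hedged usage sketch (an assumption for illustration, not code from this
   file): because TARGET and TMODE are only suggestions, a typical caller
   copies the returned rtx into the place it really wanted when they differ.
   "rhs", "target" and "mode" below are hypothetical caller variables.  */
#if 0
  rtx val = expand_expr (rhs, target, mode, EXPAND_NORMAL);
  if (val != target)
    emit_move_insn (target, val);   /* force the value into TARGET */
#endif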
6351   if (flag_non_call_exceptions)
6353       rn = lookup_stmt_eh_region (exp);
6354       /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
6356         last = get_last_insn ();
6359   /* If this is an expression of some kind and it has an associated line
6360      number, then emit the line number before expanding the expression.
6362      We need to save and restore the file and line information so that
6363      errors discovered during expansion are emitted with the right
6364      information.  It would be better if the diagnostic routines
6365      used the file/line information embedded in the tree nodes rather
6367   if (cfun && EXPR_HAS_LOCATION (exp))
6369       location_t saved_location = input_location;
6370       input_location = EXPR_LOCATION (exp);
6371       emit_line_note (input_location);
6373       /* Record where the insns produced belong.  */
6374       if (cfun->dont_emit_block_notes)
6375         record_block_change (TREE_BLOCK (exp));
6377       ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6379       input_location = saved_location;
6383     ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6386   /* If using non-call exceptions, mark all insns that may trap.
6387      expand_call() will mark CALL_INSNs before we get to this code,
6388      but it doesn't handle libcalls, and these may trap.  */
6392       for (insn = next_real_insn (last); insn;
6393            insn = next_real_insn (insn))
6395           if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6396               /* If we want exceptions for non-call insns, any
6397                  may_trap_p instruction may throw.  */
6398               && GET_CODE (PATTERN (insn)) != CLOBBER
6399               && GET_CODE (PATTERN (insn)) != USE
6400               && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
6402             REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6412 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6413                     enum expand_modifier modifier, rtx *alt_rtl)
6416   tree type = TREE_TYPE (exp);
6418   enum machine_mode mode;
6419   enum tree_code code = TREE_CODE (exp);
6421   rtx subtarget, original_target;
6425   mode = TYPE_MODE (type);
6426   unsignedp = TYPE_UNSIGNED (type);
6428   /* Use subtarget as the target for operand 0 of a binary operation.  */
6429   subtarget = get_subtarget (target);
6430   original_target = target;
6431   ignore = (target == const0_rtx
6432             || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6433                  || code == CONVERT_EXPR || code == REFERENCE_EXPR
6434                  || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6435                 && TREE_CODE (type) == VOID_TYPE));
6437   /* If we are going to ignore this result, we need only do something
6438      if there is a side-effect somewhere in the expression.  If there
6439      is, short-circuit the most common cases here.  Note that we must
6440      not call expand_expr with anything but const0_rtx in case this
6441      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6445       if (! TREE_SIDE_EFFECTS (exp))
6448       /* Ensure we reference a volatile object even if value is ignored, but
6449          don't do this if all we are doing is taking its address.  */
6450       if (TREE_THIS_VOLATILE (exp)
6451           && TREE_CODE (exp) != FUNCTION_DECL
6452           && mode != VOIDmode && mode != BLKmode
6453           && modifier != EXPAND_CONST_ADDRESS)
6455           temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6456           if (GET_CODE (temp) == MEM)
6457             temp = copy_to_reg (temp);
6461       if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6462           || code == INDIRECT_REF || code == BUFFER_REF)
6463         return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6466       else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6467                || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6469           expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6470           expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6473       else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6474                && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6475         /* If the second operand has no side effects, just evaluate
6477         return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6479       else if (code == BIT_FIELD_REF)
6481           expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6482           expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6483           expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6490   /* If we will do cse, generate all results into pseudo registers
6491      since 1) that allows cse to find more things
6492      and 2) otherwise cse could produce an insn the machine
6493      cannot support.  An exception is a CONSTRUCTOR into a multi-word
6494      MEM: that's much more likely to be most efficient into the MEM.
6495      Another is a CALL_EXPR which must return in memory.  */
6497   if (! cse_not_expected && mode != BLKmode && target
6498       && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6499       && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6500       && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6507 tree function
= decl_function_context (exp
);
6509 temp
= label_rtx (exp
);
6510 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
6512 if (function
!= current_function_decl
6514 LABEL_REF_NONLOCAL_P (temp
) = 1;
6516 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
6521 if (!DECL_RTL_SET_P (exp
))
6523 error ("%Jprior parameter's size depends on '%D'", exp
, exp
);
6524 return CONST0_RTX (mode
);
6527 /* ... fall through ... */
6530 /* If a static var's type was incomplete when the decl was written,
6531 but the type is complete now, lay out the decl now. */
6532 if (DECL_SIZE (exp
) == 0
6533 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
6534 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6535 layout_decl (exp
, 0);
6537 /* ... fall through ... */
6541 if (DECL_RTL (exp
) == 0)
6544       /* Ensure variable marked as used even if it doesn't go through
6545          a parser.  If it hasn't been used yet, write out an external
6547       if (! TREE_USED (exp))
6549           assemble_external (exp);
6550           TREE_USED (exp) = 1;
6553 /* Show we haven't gotten RTL for this yet. */
6556 /* Handle variables inherited from containing functions. */
6557 context
= decl_function_context (exp
);
6559 if (context
!= 0 && context
!= current_function_decl
6560 /* If var is static, we don't need a static chain to access it. */
6561 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6562 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6566 /* Mark as non-local and addressable. */
6567 DECL_NONLOCAL (exp
) = 1;
6568 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6570 lang_hooks
.mark_addressable (exp
);
6571 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6573 addr
= XEXP (DECL_RTL (exp
), 0);
6574 if (GET_CODE (addr
) == MEM
)
6576 = replace_equiv_address (addr
,
6577 fix_lexical_addr (XEXP (addr
, 0), exp
));
6579 addr
= fix_lexical_addr (addr
, exp
);
6581 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6584 /* This is the case of an array whose size is to be determined
6585 from its initializer, while the initializer is still being parsed.
6588 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6589 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6590 temp
= validize_mem (DECL_RTL (exp
));
6592 /* If DECL_RTL is memory, we are in the normal case and either
6593 the address is not valid or it is not a register and -fforce-addr
6594 is specified, get the address into a register. */
6596 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6597 && modifier
!= EXPAND_CONST_ADDRESS
6598 && modifier
!= EXPAND_SUM
6599 && modifier
!= EXPAND_INITIALIZER
6600 && (! memory_address_p (DECL_MODE (exp
),
6601 XEXP (DECL_RTL (exp
), 0))
6603 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6606 *alt_rtl
= DECL_RTL (exp
);
6607 temp
= replace_equiv_address (DECL_RTL (exp
),
6608 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6611 /* If we got something, return it. But first, set the alignment
6612 if the address is a register. */
6615 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6616 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6621 /* If the mode of DECL_RTL does not match that of the decl, it
6622 must be a promoted value. We return a SUBREG of the wanted mode,
6623 but mark it so that we know that it was already extended. */
6625 if (GET_CODE (DECL_RTL (exp
)) == REG
6626 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6628 /* Get the signedness used for this variable. Ensure we get the
6629 same mode we got when the variable was declared. */
6630 if (GET_MODE (DECL_RTL (exp
))
6631 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6632 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6635 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6636 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6637 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6641 return DECL_RTL (exp
);
6644 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6645 TREE_INT_CST_HIGH (exp
), mode
);
6647 /* ??? If overflow is set, fold will have done an incomplete job,
6648 which can result in (plus xx (const_int 0)), which can get
6649 simplified by validate_replace_rtx during virtual register
6650 instantiation, which can result in unrecognizable insns.
6651 Avoid this by forcing all overflows into registers. */
6652 if (TREE_CONSTANT_OVERFLOW (exp
)
6653 && modifier
!= EXPAND_INITIALIZER
)
6654 temp
= force_reg (mode
, temp
);
6659 return const_vector_from_tree (exp
);
6662 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
6665 /* If optimized, generate immediate CONST_DOUBLE
6666 which will be turned into memory by reload if necessary.
6668 We used to force a register so that loop.c could see it. But
6669 this does not allow gen_* patterns to perform optimizations with
6670 the constants. It also produces two insns in cases like "x = 1.0;".
6671 On most machines, floating-point constants are not permitted in
6672 many insns, so we'd end up copying it to a register in any case.
6674 Now, we do the copying in expand_binop, if appropriate. */
6675 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6676 TYPE_MODE (TREE_TYPE (exp
)));
6679 /* Handle evaluating a complex constant in a CONCAT target. */
6680 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
6682 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
6685 rtarg
= XEXP (original_target
, 0);
6686 itarg
= XEXP (original_target
, 1);
6688 /* Move the real and imaginary parts separately. */
6689 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, 0);
6690 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, 0);
6693 emit_move_insn (rtarg
, op0
);
6695 emit_move_insn (itarg
, op1
);
6697 return original_target
;
6700 /* ... fall through ... */
6703 temp
= output_constant_def (exp
, 1);
6705 /* temp contains a constant address.
6706 On RISC machines where a constant address isn't valid,
6707 make some insns to get that address into a register. */
6708 if (modifier
!= EXPAND_CONST_ADDRESS
6709 && modifier
!= EXPAND_INITIALIZER
6710 && modifier
!= EXPAND_SUM
6711 && (! memory_address_p (mode
, XEXP (temp
, 0))
6712 || flag_force_addr
))
6713 return replace_equiv_address (temp
,
6714 copy_rtx (XEXP (temp
, 0)));
6718 context
= decl_function_context (exp
);
6720 /* If this SAVE_EXPR was at global context, assume we are an
6721 initialization function and move it into our context. */
6723 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6725 if (context
== current_function_decl
)
6728 /* If this is non-local, handle it. */
6731 /* The following call just exists to abort if the context is
6732 not of a containing function. */
6733 find_function_data (context
);
6735 temp
= SAVE_EXPR_RTL (exp
);
6736 if (temp
&& GET_CODE (temp
) == REG
)
6738 put_var_into_stack (exp
, /*rescan=*/true);
6739 temp
= SAVE_EXPR_RTL (exp
);
6741 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6744 replace_equiv_address (temp
,
6745 fix_lexical_addr (XEXP (temp
, 0), exp
));
6747 if (SAVE_EXPR_RTL (exp
) == 0)
6749 if (mode
== VOIDmode
)
6752 temp
= assign_temp (build_qualified_type (type
,
6754 | TYPE_QUAL_CONST
)),
6757 SAVE_EXPR_RTL (exp
) = temp
;
6758 if (!optimize
&& GET_CODE (temp
) == REG
)
6759 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6762 /* If the mode of TEMP does not match that of the expression, it
6763 must be a promoted value. We pass store_expr a SUBREG of the
6764 wanted mode but mark it so that we know that it was already
6767 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6769 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6770 promote_mode (type
, mode
, &unsignedp
, 0);
6771 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6772 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6775 if (temp
== const0_rtx
)
6776 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6778 store_expr (TREE_OPERAND (exp
, 0), temp
,
6779 modifier
== EXPAND_STACK_PARM
? 2 : 0);
6781 TREE_USED (exp
) = 1;
6784 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6785 must be a promoted value. We return a SUBREG of the wanted mode,
6786 but mark it so that we know that it was already extended. */
6788 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6789 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6791 /* Compute the signedness and make the proper SUBREG. */
6792 promote_mode (type
, mode
, &unsignedp
, 0);
6793 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6794 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6795 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6799 return SAVE_EXPR_RTL (exp
);
6804 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6805 TREE_OPERAND (exp
, 0)
6806 = lang_hooks
.unsave_expr_now (TREE_OPERAND (exp
, 0));
6811 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6812 expand_goto (TREE_OPERAND (exp
, 0));
6814 expand_computed_goto (TREE_OPERAND (exp
, 0));
6817 /* These are lowered during gimplification, so we should never ever
6823 case LABELED_BLOCK_EXPR
:
6824 if (LABELED_BLOCK_BODY (exp
))
6825 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6826 /* Should perhaps use expand_label, but this is simpler and safer. */
6827 do_pending_stack_adjust ();
6828 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6831 case EXIT_BLOCK_EXPR
:
6832 if (EXIT_BLOCK_RETURN (exp
))
6833 sorry ("returned value in block_exit_expr");
6834 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6839 tree block
= BIND_EXPR_BLOCK (exp
);
6842 if (TREE_CODE (BIND_EXPR_BODY (exp
)) != RTL_EXPR
)
6844 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6845 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6846 mark_ends
= (block
!= NULL_TREE
);
6847 expand_start_bindings_and_block (mark_ends
? 0 : 2, block
);
6851 /* If we're not in functions-as-trees mode, we've already emitted
6852 those notes into our RTL_EXPR, so we just want to splice our BLOCK
6853 into the enclosing one. */
6856 /* Need to open a binding contour here because
6857 if there are any cleanups they must be contained here. */
6858 expand_start_bindings_and_block (2, NULL_TREE
);
6860 /* Mark the corresponding BLOCK for output in its proper place. */
6863 if (TREE_USED (block
))
6865 lang_hooks
.decls
.insert_block (block
);
6869 /* If VARS have not yet been expanded, expand them now. */
6870 expand_vars (BIND_EXPR_VARS (exp
));
6872       /* TARGET was clobbered early in this function.  The correct
6873          indicator of whether or not we need the value of this
6874          expression is the IGNORE variable.  */
6875 temp
= expand_expr (BIND_EXPR_BODY (exp
),
6876 ignore
? const0_rtx
: target
,
6879 expand_end_bindings (BIND_EXPR_VARS (exp
), mark_ends
, 0);
6885 if (RTL_EXPR_SEQUENCE (exp
))
6887 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6889 emit_insn (RTL_EXPR_SEQUENCE (exp
));
6890 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6892 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6893 free_temps_for_rtl_expr (exp
);
6895 *alt_rtl
= RTL_EXPR_ALT_RTL (exp
);
6896 return RTL_EXPR_RTL (exp
);
6899 /* If we don't need the result, just ensure we evaluate any
6905 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6906 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6911 /* All elts simple constants => refer to a constant in memory. But
6912 if this is a non-BLKmode mode, let it store a field at a time
6913 since that should make a CONST_INT or CONST_DOUBLE when we
6914 fold. Likewise, if we have a target we can use, it is best to
6915 store directly into the target unless the type is large enough
6916 that memcpy will be used. If we are making an initializer and
6917 all operands are constant, put it in memory as well.
6919 FIXME: Avoid trying to fill vector constructors piece-meal.
6920 Output them with output_constant_def below unless we're sure
6921 they're zeros. This should go away when vector initializers
6922 are treated like VECTOR_CST instead of arrays.
6924 else if ((TREE_STATIC (exp
)
6925 && ((mode
== BLKmode
6926 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6927 || TREE_ADDRESSABLE (exp
)
6928 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6929 && (! MOVE_BY_PIECES_P
6930 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6932 && ! mostly_zeros_p (exp
))))
6933 || ((modifier
== EXPAND_INITIALIZER
6934 || modifier
== EXPAND_CONST_ADDRESS
)
6935 && TREE_CONSTANT (exp
)))
6937 rtx constructor
= output_constant_def (exp
, 1);
6939 if (modifier
!= EXPAND_CONST_ADDRESS
6940 && modifier
!= EXPAND_INITIALIZER
6941 && modifier
!= EXPAND_SUM
)
6942 constructor
= validize_mem (constructor
);
6948 /* Handle calls that pass values in multiple non-contiguous
6949 locations. The Irix 6 ABI has examples of this. */
6950 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6951 || GET_CODE (target
) == PARALLEL
6952 || modifier
== EXPAND_STACK_PARM
)
6954 = assign_temp (build_qualified_type (type
,
6956 | (TREE_READONLY (exp
)
6957 * TYPE_QUAL_CONST
))),
6958 0, TREE_ADDRESSABLE (exp
), 1);
6960 store_constructor (exp
, target
, 0, int_expr_size (exp
));
6966 tree exp1
= TREE_OPERAND (exp
, 0);
6968 if (modifier
!= EXPAND_WRITE
)
6972 t
= fold_read_from_constant_string (exp
);
6974 return expand_expr (t
, target
, tmode
, modifier
);
6977 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6978 op0
= memory_address (mode
, op0
);
6979 temp
= gen_rtx_MEM (mode
, op0
);
6980 set_mem_attributes (temp
, exp
, 0);
6982 /* If we are writing to this object and its type is a record with
6983 readonly fields, we must mark it as readonly so it will
6984 conflict with readonly references to those fields. */
6985 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
6986 RTX_UNCHANGING_P (temp
) = 1;
6993 #ifdef ENABLE_CHECKING
6994 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6999 tree array
= TREE_OPERAND (exp
, 0);
7000 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
7001 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
7002 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
7005 /* Optimize the special-case of a zero lower bound.
7007 We convert the low_bound to sizetype to avoid some problems
7008 with constant folding. (E.g. suppose the lower bound is 1,
7009 and its mode is QI. Without the conversion, (ARRAY
7010 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7011 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7013 if (! integer_zerop (low_bound
))
7014 index
= size_diffop (index
, convert (sizetype
, low_bound
));
7016 /* Fold an expression like: "foo"[2].
7017 This is not done in fold so it won't happen inside &.
7018 Don't fold if this is for wide characters since it's too
7019 difficult to do correctly and this is a very rare case. */
7021 if (modifier
!= EXPAND_CONST_ADDRESS
7022 && modifier
!= EXPAND_INITIALIZER
7023 && modifier
!= EXPAND_MEMORY
)
7025 tree t
= fold_read_from_constant_string (exp
);
7028 return expand_expr (t
, target
, tmode
, modifier
);
7031 /* If this is a constant index into a constant array,
7032 just get the value from the array. Handle both the cases when
7033 we have an explicit constructor and when our operand is a variable
7034 that was declared const. */
7036 if (modifier
!= EXPAND_CONST_ADDRESS
7037 && modifier
!= EXPAND_INITIALIZER
7038 && modifier
!= EXPAND_MEMORY
7039 && TREE_CODE (array
) == CONSTRUCTOR
7040 && ! TREE_SIDE_EFFECTS (array
)
7041 && TREE_CODE (index
) == INTEGER_CST
7042 && 0 > compare_tree_int (index
,
7043 list_length (CONSTRUCTOR_ELTS
7044 (TREE_OPERAND (exp
, 0)))))
7048 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
7049 i
= TREE_INT_CST_LOW (index
);
7050 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
7054 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
7058 else if (optimize
>= 1
7059 && modifier
!= EXPAND_CONST_ADDRESS
7060 && modifier
!= EXPAND_INITIALIZER
7061 && modifier
!= EXPAND_MEMORY
7062 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
7063 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
7064 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
7065 && targetm
.binds_local_p (array
))
7067 if (TREE_CODE (index
) == INTEGER_CST
)
7069 tree init
= DECL_INITIAL (array
);
7071 if (TREE_CODE (init
) == CONSTRUCTOR
)
7075 for (elem
= CONSTRUCTOR_ELTS (init
);
7077 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
7078 elem
= TREE_CHAIN (elem
))
7081 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
7082 return expand_expr (fold (TREE_VALUE (elem
)), target
,
7085 else if (TREE_CODE (init
) == STRING_CST
7086 && 0 > compare_tree_int (index
,
7087 TREE_STRING_LENGTH (init
)))
7089 tree type
= TREE_TYPE (TREE_TYPE (init
));
7090 enum machine_mode mode
= TYPE_MODE (type
);
7092 if (GET_MODE_CLASS (mode
) == MODE_INT
7093 && GET_MODE_SIZE (mode
) == 1)
7094 return gen_int_mode (TREE_STRING_POINTER (init
)
7095 [TREE_INT_CST_LOW (index
)], mode
);
7100 goto normal_inner_ref
;
7103 /* If the operand is a CONSTRUCTOR, we can just extract the
7104 appropriate field if it is present. */
7105 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
7109 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
7110 elt
= TREE_CHAIN (elt
))
7111 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
7112 /* We can normally use the value of the field in the
7113 CONSTRUCTOR. However, if this is a bitfield in
7114 an integral mode that we can fit in a HOST_WIDE_INT,
7115 we must mask only the number of bits in the bitfield,
7116 since this is done implicitly by the constructor. If
7117 the bitfield does not meet either of those conditions,
7118 we can't do this optimization. */
7119 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7120 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
7122 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
7123 <= HOST_BITS_PER_WIDE_INT
))))
7125 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7126 && modifier
== EXPAND_STACK_PARM
)
7128 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
7129 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
7131 HOST_WIDE_INT bitsize
7132 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
7133 enum machine_mode imode
7134 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
7136 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
7138 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7139 op0
= expand_and (imode
, op0
, op1
, target
);
7144 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
7147 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7149 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7157 goto normal_inner_ref
;
7160 case ARRAY_RANGE_REF
:
7163 enum machine_mode mode1
;
7164 HOST_WIDE_INT bitsize
, bitpos
;
7167 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7168 &mode1
, &unsignedp
, &volatilep
);
7171 /* If we got back the original object, something is wrong. Perhaps
7172 we are evaluating an expression too early. In any event, don't
7173 infinitely recurse. */
7177 /* If TEM's type is a union of variable size, pass TARGET to the inner
7178 computation, since it will need a temporary and TARGET is known
7179 to have to do. This occurs in unchecked conversion in Ada. */
7183 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7184 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7186 && modifier
!= EXPAND_STACK_PARM
7187 ? target
: NULL_RTX
),
7189 (modifier
== EXPAND_INITIALIZER
7190 || modifier
== EXPAND_CONST_ADDRESS
7191 || modifier
== EXPAND_STACK_PARM
)
7192 ? modifier
: EXPAND_NORMAL
);
7194 /* If this is a constant, put it into a register if it is a
7195 legitimate constant and OFFSET is 0 and memory if it isn't. */
7196 if (CONSTANT_P (op0
))
7198 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7199 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7201 op0
= force_reg (mode
, op0
);
7203 op0
= validize_mem (force_const_mem (mode
, op0
));
7206 /* Otherwise, if this object not in memory and we either have an
7207 offset or a BLKmode result, put it there. This case can't occur in
7208 C, but can in Ada if we have unchecked conversion of an expression
7209 from a scalar type to an array or record type or for an
7210 ARRAY_RANGE_REF whose type is BLKmode. */
7211 else if (GET_CODE (op0
) != MEM
7213 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
7215 /* If the operand is a SAVE_EXPR, we can deal with this by
7216 forcing the SAVE_EXPR into memory. */
7217 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7219 put_var_into_stack (TREE_OPERAND (exp
, 0),
7221 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7226 = build_qualified_type (TREE_TYPE (tem
),
7227 (TYPE_QUALS (TREE_TYPE (tem
))
7228 | TYPE_QUAL_CONST
));
7229 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7231 emit_move_insn (memloc
, op0
);
7238 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7241 if (GET_CODE (op0
) != MEM
)
7244 #ifdef POINTERS_EXTEND_UNSIGNED
7245 if (GET_MODE (offset_rtx
) != Pmode
)
7246 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7248 if (GET_MODE (offset_rtx
) != ptr_mode
)
7249 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7252 if (GET_MODE (op0
) == BLKmode
7253 /* A constant address in OP0 can have VOIDmode, we must
7254 not try to call force_reg in that case. */
7255 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7257 && (bitpos
% bitsize
) == 0
7258 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7259 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7261 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7265 op0
= offset_address (op0
, offset_rtx
,
7266 highest_pow2_factor (offset
));
7269 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7270 record its alignment as BIGGEST_ALIGNMENT. */
7271 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7272 && is_aligning_offset (offset
, tem
))
7273 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7275 /* Don't forget about volatility even if this is a bitfield. */
7276 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7278 if (op0
== orig_op0
)
7279 op0
= copy_rtx (op0
);
7281 MEM_VOLATILE_P (op0
) = 1;
7284 /* The following code doesn't handle CONCAT.
7285 Assume only bitpos == 0 can be used for CONCAT, due to
7286 one element arrays having the same mode as its element. */
7287 if (GET_CODE (op0
) == CONCAT
)
7289 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
7294 /* In cases where an aligned union has an unaligned object
7295 as a field, we might be extracting a BLKmode value from
7296 an integer-mode (e.g., SImode) object. Handle this case
7297 by doing the extract into an object as wide as the field
7298 (which we know to be the width of a basic mode), then
7299 storing into memory, and changing the mode to BLKmode. */
7300 if (mode1
== VOIDmode
7301 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7302 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7303 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7304 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7305 && modifier
!= EXPAND_CONST_ADDRESS
7306 && modifier
!= EXPAND_INITIALIZER
)
7307 /* If the field isn't aligned enough to fetch as a memref,
7308 fetch it as a bit field. */
7309 || (mode1
!= BLKmode
7310 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7311 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
7312 || (GET_CODE (op0
) == MEM
7313 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
7314 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
7315 && ((modifier
== EXPAND_CONST_ADDRESS
7316 || modifier
== EXPAND_INITIALIZER
)
7318 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7319 || (bitpos
% BITS_PER_UNIT
!= 0)))
7320 /* If the type and the field are a constant size and the
7321 size of the type isn't the same size as the bitfield,
7322 we must use bitfield operations. */
7324 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7326 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7329 enum machine_mode ext_mode
= mode
;
7331 if (ext_mode
== BLKmode
7332 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7333 && GET_CODE (target
) == MEM
7334 && bitpos
% BITS_PER_UNIT
== 0))
7335 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7337 if (ext_mode
== BLKmode
)
7340 target
= assign_temp (type
, 0, 1, 1);
7345 /* In this case, BITPOS must start at a byte boundary and
7346 TARGET, if specified, must be a MEM. */
7347 if (GET_CODE (op0
) != MEM
7348 || (target
!= 0 && GET_CODE (target
) != MEM
)
7349 || bitpos
% BITS_PER_UNIT
!= 0)
7352 emit_block_move (target
,
7353 adjust_address (op0
, VOIDmode
,
7354 bitpos
/ BITS_PER_UNIT
),
7355 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7357 (modifier
== EXPAND_STACK_PARM
7358 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7363 op0
= validize_mem (op0
);
7365 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7366 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7368 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7369 (modifier
== EXPAND_STACK_PARM
7370 ? NULL_RTX
: target
),
7372 int_size_in_bytes (TREE_TYPE (tem
)));
7374 /* If the result is a record type and BITSIZE is narrower than
7375 the mode of OP0, an integral mode, and this is a big endian
7376 machine, we must put the field into the high-order bits. */
7377 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7378 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7379 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7380 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7381 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7385 /* If the result type is BLKmode, store the data into a temporary
7386 of the appropriate type, but with the mode corresponding to the
7387 mode for the data we have (op0's mode). It's tempting to make
7388 this a constant type, since we know it's only being stored once,
7389 but that can cause problems if we are taking the address of this
7390 COMPONENT_REF because the MEM of any reference via that address
7391 will have flags corresponding to the type, which will not
7392 necessarily be constant. */
7393 if (mode
== BLKmode
)
7396 = assign_stack_temp_for_type
7397 (ext_mode
, GET_MODE_BITSIZE (ext_mode
), 0, type
);
7399 emit_move_insn (new, op0
);
7400 op0
= copy_rtx (new);
7401 PUT_MODE (op0
, BLKmode
);
7402 set_mem_attributes (op0
, exp
, 1);
7408 /* If the result is BLKmode, use that to access the object
7410 if (mode
== BLKmode
)
7413 /* Get a reference to just this component. */
7414 if (modifier
== EXPAND_CONST_ADDRESS
7415 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7416 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7418 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7420 if (op0
== orig_op0
)
7421 op0
= copy_rtx (op0
);
7423 set_mem_attributes (op0
, exp
, 0);
7424 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7425 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7427 MEM_VOLATILE_P (op0
) |= volatilep
;
7428 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7429 || modifier
== EXPAND_CONST_ADDRESS
7430 || modifier
== EXPAND_INITIALIZER
)
7432 else if (target
== 0)
7433 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7435 convert_move (target
, op0
, unsignedp
);
7441 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7443 /* Evaluate the interior expression. */
7444 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7447 /* Get or create an instruction off which to hang a note. */
7448 if (REG_P (subtarget
))
7451 insn
= get_last_insn ();
7454 if (! INSN_P (insn
))
7455 insn
= prev_nonnote_insn (insn
);
7459 target
= gen_reg_rtx (GET_MODE (subtarget
));
7460 insn
= emit_move_insn (target
, subtarget
);
7463 /* Collect the data for the note. */
7464 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7465 vtbl_ref
= plus_constant (vtbl_ref
,
7466 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7467 /* Discard the initial CONST that was added. */
7468 vtbl_ref
= XEXP (vtbl_ref
, 0);
7471 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7476 /* Intended for a reference to a buffer of a file-object in Pascal.
7477 But it's not certain that a special tree code will really be
7478 necessary for these. INDIRECT_REF might work for them. */
7484 /* Pascal set IN expression.
7487 rlo = set_low - (set_low%bits_per_word);
7488 the_word = set [ (index - rlo)/bits_per_word ];
7489 bit_index = index % bits_per_word;
7490 bitmask = 1 << bit_index;
7491 return !!(the_word & bitmask); */
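/* Illustrative sketch only (not part of GCC): the test described by the
   pseudo-code above, written as plain C over a byte array with 8-bit units
   assumed; set_low and index stand for the run-time values the generated
   RTL computes.  */
#if 0
static int
set_member_example (const unsigned char *set, int set_low, int index)
{
  int rlo      = set_low - (set_low % 8);   /* bit number stored in set[0] */
  int the_byte = set[(index - rlo) / 8];    /* unit that holds our bit     */
  int bit      = index % 8;                 /* position within that unit   */
  return (the_byte >> bit) & 1;             /* nonzero iff INDEX is in SET */
}
#endif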
7493 tree set
= TREE_OPERAND (exp
, 0);
7494 tree index
= TREE_OPERAND (exp
, 1);
7495 int iunsignedp
= TYPE_UNSIGNED (TREE_TYPE (index
));
7496 tree set_type
= TREE_TYPE (set
);
7497 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7498 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7499 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7500 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7501 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7502 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7503 rtx setaddr
= XEXP (setval
, 0);
7504 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7506 rtx diff
, quo
, rem
, addr
, bit
, result
;
7508 /* If domain is empty, answer is no. Likewise if index is constant
7509 and out of bounds. */
7510 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7511 && TREE_CODE (set_low_bound
) == INTEGER_CST
7512 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7513 || (TREE_CODE (index
) == INTEGER_CST
7514 && TREE_CODE (set_low_bound
) == INTEGER_CST
7515 && tree_int_cst_lt (index
, set_low_bound
))
7516 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7517 && TREE_CODE (index
) == INTEGER_CST
7518 && tree_int_cst_lt (set_high_bound
, index
))))
7522 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7524 /* If we get here, we have to generate the code for both cases
7525 (in range and out of range). */
7527 op0
= gen_label_rtx ();
7528 op1
= gen_label_rtx ();
7530 if (! (GET_CODE (index_val
) == CONST_INT
7531 && GET_CODE (lo_r
) == CONST_INT
))
7532 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7533 GET_MODE (index_val
), iunsignedp
, op1
);
7535 if (! (GET_CODE (index_val
) == CONST_INT
7536 && GET_CODE (hi_r
) == CONST_INT
))
7537 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7538 GET_MODE (index_val
), iunsignedp
, op1
);
7540 /* Calculate the element number of bit zero in the first word
7542 if (GET_CODE (lo_r
) == CONST_INT
)
7543 rlow
= GEN_INT (INTVAL (lo_r
)
7544 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7546 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7547 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7548 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7550 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7551 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7553 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7554 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7555 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7556 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7558 addr
= memory_address (byte_mode
,
7559 expand_binop (index_mode
, add_optab
, diff
,
7560 setaddr
, NULL_RTX
, iunsignedp
,
7563 /* Extract the bit we want to examine. */
7564 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7565 gen_rtx_MEM (byte_mode
, addr
),
7566 make_tree (TREE_TYPE (index
), rem
),
7568 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7569 GET_MODE (target
) == byte_mode
? target
: 0,
7570 1, OPTAB_LIB_WIDEN
);
7572 if (result
!= target
)
7573 convert_move (target
, result
, 1);
7575 /* Output the code to handle the out-of-range case. */
7578 emit_move_insn (target
, const0_rtx
);
7583 case WITH_CLEANUP_EXPR
:
7584 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7586 WITH_CLEANUP_EXPR_RTL (exp
)
7587 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7588 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7589 CLEANUP_EH_ONLY (exp
));
7591 /* That's it for this cleanup. */
7592 TREE_OPERAND (exp
, 1) = 0;
7594 return WITH_CLEANUP_EXPR_RTL (exp
);
7596 case CLEANUP_POINT_EXPR
:
7598 /* Start a new binding layer that will keep track of all cleanup
7599 actions to be performed. */
7600 expand_start_bindings (2);
7602 target_temp_slot_level
= temp_slot_level
;
7604 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7605 /* If we're going to use this value, load it up now. */
7607 op0
= force_not_mem (op0
);
7608 preserve_temp_slots (op0
);
7609 expand_end_bindings (NULL_TREE
, 0, 0);
7614 /* Check for a built-in function. */
7615 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7616 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7618 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7620 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7621 == BUILT_IN_FRONTEND
)
7622 return lang_hooks
.expand_expr (exp
, original_target
,
7626 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7629 return expand_call (exp
, target
, ignore
);
7631 case NON_LVALUE_EXPR
:
7634 case REFERENCE_EXPR
:
7635 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7638 if (TREE_CODE (type
) == UNION_TYPE
)
7640 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7642 /* If both input and output are BLKmode, this conversion isn't doing
7643 anything except possibly changing memory attribute. */
7644 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7646 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7649 result
= copy_rtx (result
);
7650 set_mem_attributes (result
, exp
, 0);
7656 if (TYPE_MODE (type
) != BLKmode
)
7657 target
= gen_reg_rtx (TYPE_MODE (type
));
7659 target
= assign_temp (type
, 0, 1, 1);
7662 if (GET_CODE (target
) == MEM
)
7663 /* Store data into beginning of memory target. */
7664 store_expr (TREE_OPERAND (exp
, 0),
7665 adjust_address (target
, TYPE_MODE (valtype
), 0),
7666 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7668 else if (GET_CODE (target
) == REG
)
7669 /* Store this field into a union of the proper type. */
7670 store_field (target
,
7671 MIN ((int_size_in_bytes (TREE_TYPE
7672 (TREE_OPERAND (exp
, 0)))
7674 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7675 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7676 VOIDmode
, 0, type
, 0);
7680 /* Return the entire union. */
7684 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7686 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7689 /* If the signedness of the conversion differs and OP0 is
7690 a promoted SUBREG, clear that indication since we now
7691 have to do the proper extension. */
7692 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7693 && GET_CODE (op0
) == SUBREG
)
7694 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7699 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7700 if (GET_MODE (op0
) == mode
)
7703 /* If OP0 is a constant, just convert it into the proper mode. */
7704 if (CONSTANT_P (op0
))
7706 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7707 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7709 if (modifier
== EXPAND_INITIALIZER
)
7710 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7711 subreg_lowpart_offset (mode
,
7714 return convert_modes (mode
, inner_mode
, op0
,
7715 TYPE_UNSIGNED (inner_type
));
7718 if (modifier
== EXPAND_INITIALIZER
)
7719 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7723 convert_to_mode (mode
, op0
,
7724 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7726 convert_move (target
, op0
,
7727 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7730 case VIEW_CONVERT_EXPR
:
7731 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7733 /* If the input and output modes are both the same, we are done.
7734 Otherwise, if neither mode is BLKmode and both are integral and within
7735 a word, we can use gen_lowpart. If neither is true, make sure the
7736 operand is in memory and convert the MEM to the new mode. */
7737 if (TYPE_MODE (type
) == GET_MODE (op0
))
7739 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7740 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7741 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7742 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7743 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7744 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7745 else if (GET_CODE (op0
) != MEM
)
7747           /* If the operand is not a MEM, force it into memory.  Since we
7748              are going to be changing the mode of the MEM, don't call
7749              force_const_mem for constants because we don't allow pool
7750              constants to change mode.  */
7751 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7753 if (TREE_ADDRESSABLE (exp
))
7756 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7758 = assign_stack_temp_for_type
7759 (TYPE_MODE (inner_type
),
7760 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7762 emit_move_insn (target
, op0
);
7766 /* At this point, OP0 is in the correct mode. If the output type is such
7767 that the operand is known to be aligned, indicate that it is.
7768 Otherwise, we need only be concerned about alignment for non-BLKmode
7770 if (GET_CODE (op0
) == MEM
)
7772 op0
= copy_rtx (op0
);
7774 if (TYPE_ALIGN_OK (type
))
7775 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7776 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7777 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7779 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7780 HOST_WIDE_INT temp_size
7781 = MAX (int_size_in_bytes (inner_type
),
7782 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7783 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7784 temp_size
, 0, type
);
7785 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7787 if (TREE_ADDRESSABLE (exp
))
7790 if (GET_MODE (op0
) == BLKmode
)
7791 emit_block_move (new_with_op0_mode
, op0
,
7792 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7793 (modifier
== EXPAND_STACK_PARM
7794 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7796 emit_move_insn (new_with_op0_mode
, op0
);
7801 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7807 this_optab
= ! unsignedp
&& flag_trapv
7808 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7809 ? addv_optab
: add_optab
;
7811 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7812 something else, make sure we add the register to the constant and
7813 then to the other thing. This case can occur during strength
7814 reduction and doing it this way will produce better code if the
7815 frame pointer or argument pointer is eliminated.
7817 fold-const.c will ensure that the constant is always in the inner
7818 PLUS_EXPR, so the only case we need to do anything about is if
7819 sp, ap, or fp is our second argument, in which case we must swap
7820 the innermost first argument and our second argument. */
7822 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7823 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7824 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7825 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7826 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7827 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7829 tree t
= TREE_OPERAND (exp
, 1);
7831 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7832 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7835 /* If the result is to be ptr_mode and we are adding an integer to
7836 something, we might be forming a constant. So try to use
7837 plus_constant. If it produces a sum and we can't accept it,
7838 use force_operand. This allows P = &ARR[const] to generate
7839 efficient code on machines where a SYMBOL_REF is not a valid
7842 If this is an EXPAND_SUM call, always return the sum. */
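/* Illustrative sketch only (not part of GCC): the kind of address this path
   tries to keep as one constant.  The array and offset below are made up;
   the point is that "&arr[10]" should fold to a single symbol-plus-offset
   address rather than an addition performed at run time.  */
#if 0
extern int arr[100];
static int *addr_example = &arr[10];   /* folds to arr + 40 at compile time */
#endif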
7843 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7844 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7846 if (modifier
== EXPAND_STACK_PARM
)
7848 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7849 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7850 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7854 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7856 /* Use immed_double_const to ensure that the constant is
7857 truncated according to the mode of OP1, then sign extended
7858 to a HOST_WIDE_INT. Using the constant directly can result
7859 in non-canonical RTL in a 64x32 cross compile. */
7861 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7863 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7864 op1
= plus_constant (op1
, INTVAL (constant_part
));
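/* Illustrative sketch only (not part of GCC): why the constant is pushed
   through immed_double_const above rather than used directly.  On a 64-bit
   host building for a 32-bit target, the low word must be re-read in the
   operand's mode and sign-extended, or the resulting value is non-canonical.
   The literal below is a made-up example value.  */
#if 0
  long long host_low_word = 0xfffffffcLL;  /* low word taken from the tree    */
  int in_si_mode = (int) host_low_word;    /* truncate to the 32-bit mode     */
  long long canonical = in_si_mode;        /* sign-extend back: -4, canonical */
#endif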
7865 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7866 op1
= force_operand (op1
, target
);
7870 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7871 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7872 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7876 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7877 (modifier
== EXPAND_INITIALIZER
7878 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7879 if (! CONSTANT_P (op0
))
7881 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7882 VOIDmode
, modifier
);
7883 /* Return a PLUS if modifier says it's OK. */
7884 if (modifier
== EXPAND_SUM
7885 || modifier
== EXPAND_INITIALIZER
)
7886 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7889 /* Use immed_double_const to ensure that the constant is
7890 truncated according to the mode of OP1, then sign extended
7891 to a HOST_WIDE_INT. Using the constant directly can result
7892 in non-canonical RTL in a 64x32 cross compile. */
7894 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7896 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7897 op0
= plus_constant (op0
, INTVAL (constant_part
));
7898 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7899 op0
= force_operand (op0
, target
);
7904 /* No sense saving up arithmetic to be done
7905 if it's all in the wrong mode to form part of an address.
7906 And force_operand won't know whether to sign-extend or
7908 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7909 || mode
!= ptr_mode
)
7911 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7912 subtarget
, &op0
, &op1
, 0);
7913 if (op0
== const0_rtx
)
7915 if (op1
== const0_rtx
)
7920 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7921 subtarget
, &op0
, &op1
, modifier
);
7922 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
7930 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7931 && really_constant_p (TREE_OPERAND (exp
, 0))
7932 && really_constant_p (TREE_OPERAND (exp
, 1)))
7934 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7935 NULL_RTX
, &op0
, &op1
, modifier
);
	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
7939 if (GET_CODE (op1
) == CONST_INT
)
7940 return plus_constant (op0
, - INTVAL (op1
));
7942 return gen_rtx_MINUS (mode
, op0
, op1
);
7945 this_optab
= ! unsignedp
&& flag_trapv
7946 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7947 ? subv_optab
: sub_optab
;
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
7953 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7954 || mode
!= ptr_mode
)
7957 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7958 subtarget
, &op0
, &op1
, modifier
);
7960 /* Convert A - const to A + (-const). */
7961 if (GET_CODE (op1
) == CONST_INT
)
7963 op1
= negate_rtx (mode
, op1
);
7964 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
7973 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7975 tree t1
= TREE_OPERAND (exp
, 0);
7976 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7977 TREE_OPERAND (exp
, 1) = t1
;
      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */
7983 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7984 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7986 tree exp1
= TREE_OPERAND (exp
, 1);
7988 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7991 if (GET_CODE (op0
) != REG
)
7992 op0
= force_operand (op0
, NULL_RTX
);
7993 if (GET_CODE (op0
) != REG
)
7994 op0
= copy_to_mode_reg (mode
, op0
);
7996 return gen_rtx_MULT (mode
, op0
,
7997 gen_int_mode (tree_low_cst (exp1
, 0),
7998 TYPE_MODE (TREE_TYPE (exp1
))));
8001 if (modifier
== EXPAND_STACK_PARM
)
      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
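      /* Illustrative sketch (editorial addition, not part of the original
	 source): the widening-multiply case fires for source code such as
	 the fragment below, where both operands are extensions from a
	 narrower type and the machine can multiply in the narrow mode
	 producing a result in the wide mode.  */
#if 0
      int a, b;
      long long c = (long long) a * (long long) b;   /* one widening mult */
#endif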
8008 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
8009 && TREE_CODE (type
) == INTEGER_TYPE
8010 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8011 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
8012 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
8013 && int_fits_type_p (TREE_OPERAND (exp
, 1),
8014 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8015 /* Don't use a widening multiply if a shift will do. */
8016 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
8017 > HOST_BITS_PER_WIDE_INT
)
8018 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
8020 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8021 && (TYPE_PRECISION (TREE_TYPE
8022 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
8023 == TYPE_PRECISION (TREE_TYPE
8025 (TREE_OPERAND (exp
, 0), 0))))
		  /* If both operands are extended, they must either both
		     be zero-extended or both be sign-extended.  */
8028 && (TYPE_UNSIGNED (TREE_TYPE
8029 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
8030 == TYPE_UNSIGNED (TREE_TYPE
8032 (TREE_OPERAND (exp
, 0), 0)))))))
8034 tree op0type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0));
8035 enum machine_mode innermode
= TYPE_MODE (op0type
);
8036 bool zextend_p
= TYPE_UNSIGNED (op0type
);
8037 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
8038 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
8040 if (mode
== GET_MODE_WIDER_MODE (innermode
))
8042 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
8044 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
8045 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8046 TREE_OPERAND (exp
, 1),
8047 NULL_RTX
, &op0
, &op1
, 0);
8049 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8050 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
8051 NULL_RTX
, &op0
, &op1
, 0);
8054 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
8055 && innermode
== word_mode
)
8058 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8059 NULL_RTX
, VOIDmode
, 0);
8060 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
8061 op1
= convert_modes (innermode
, mode
,
8062 expand_expr (TREE_OPERAND (exp
, 1),
8063 NULL_RTX
, VOIDmode
, 0),
8066 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
8067 NULL_RTX
, VOIDmode
, 0);
8068 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8069 unsignedp
, OPTAB_LIB_WIDEN
);
8070 hipart
= gen_highpart (innermode
, temp
);
8071 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
8075 emit_move_insn (hipart
, htem
);
8080 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8081 subtarget
, &op0
, &op1
, 0);
8082 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
8084 case TRUNC_DIV_EXPR
:
8085 case FLOOR_DIV_EXPR
:
8087 case ROUND_DIV_EXPR
:
8088 case EXACT_DIV_EXPR
:
8089 if (modifier
== EXPAND_STACK_PARM
)
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
8094 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8095 subtarget
, &op0
, &op1
, 0);
8096 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
	 expensive divide.  If not, combine will rebuild the original
	 computation.  */
8102 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
8103 && TREE_CODE (type
) == REAL_TYPE
8104 && !real_onep (TREE_OPERAND (exp
, 0)))
8105 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
8106 build (RDIV_EXPR
, type
,
8107 build_real (type
, dconst1
),
8108 TREE_OPERAND (exp
, 1))),
8109 target
, tmode
, modifier
);
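      /* Illustrative sketch (editorial addition, not part of the original
	 source): with -funsafe-math-optimizations the division below is
	 rewritten as a multiplication by the reciprocal, which later
	 passes may CSE when the same divisor is used repeatedly.  */
#if 0
      double x, y;
      double q = x / y;		/* expanded as x * (1.0 / y) */
#endif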
8110 this_optab
= sdiv_optab
;
8113 case TRUNC_MOD_EXPR
:
8114 case FLOOR_MOD_EXPR
:
8116 case ROUND_MOD_EXPR
:
8117 if (modifier
== EXPAND_STACK_PARM
)
8119 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8120 subtarget
, &op0
, &op1
, 0);
8121 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
8123 case FIX_ROUND_EXPR
:
8124 case FIX_FLOOR_EXPR
:
8126 abort (); /* Not used for C. */
8128 case FIX_TRUNC_EXPR
:
8129 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8130 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8131 target
= gen_reg_rtx (mode
);
8132 expand_fix (target
, op0
, unsignedp
);
8136 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8137 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8138 target
= gen_reg_rtx (mode
);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
8141 if (GET_MODE (op0
) == VOIDmode
)
8142 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8144 expand_float (target
, op0
,
8145 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
8149 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8150 if (modifier
== EXPAND_STACK_PARM
)
8152 temp
= expand_unop (mode
,
8153 ! unsignedp
&& flag_trapv
8154 && (GET_MODE_CLASS(mode
) == MODE_INT
)
8155 ? negv_optab
: neg_optab
, op0
, target
, 0);
8161 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8162 if (modifier
== EXPAND_STACK_PARM
)
8165 /* ABS_EXPR is not valid for complex arguments. */
8166 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
8167 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
8172 if (TYPE_UNSIGNED (type
))
8175 return expand_abs (mode
, op0
, target
, unsignedp
,
8176 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
8180 target
= original_target
;
8182 || modifier
== EXPAND_STACK_PARM
8183 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8184 || GET_MODE (target
) != mode
8185 || (GET_CODE (target
) == REG
8186 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8187 target
= gen_reg_rtx (mode
);
8188 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8189 target
, &op0
, &op1
, 0);
      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
8194 this_optab
= (unsignedp
8195 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8196 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8198 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
8206 if (GET_CODE (target
) == MEM
)
8207 target
= gen_reg_rtx (mode
);
8209 /* If op1 was placed in target, swap op0 and op1. */
8210 if (target
!= op0
&& target
== op1
)
8218 emit_move_insn (target
, op0
);
8220 op0
= gen_label_rtx ();
      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
8224 if (GET_MODE_CLASS (mode
) == MODE_INT
8225 && ! can_compare_p (GE
, mode
, ccp_jump
))
8227 if (code
== MAX_EXPR
)
8228 do_jump_by_parts_greater_rtx (mode
, unsignedp
, target
, op1
,
8231 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op1
, target
,
8236 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8237 unsignedp
, mode
, NULL_RTX
, NULL_RTX
, op0
);
8239 emit_move_insn (target
, op1
);
8244 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8245 if (modifier
== EXPAND_STACK_PARM
)
8247 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */
      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
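      /* Illustrative sketch (editorial addition, not part of the original
	 source): for operands already reduced to 0 or 1, the bitwise and
	 truth forms compute the same value; the difference is only that
	 TRUTH_ANDIF_EXPR may skip evaluating its second operand.  */
#if 0
      int p, q;
      int r1 = p & q;	/* BIT_AND_EXPR: both operands always evaluated */
      int r2 = p && q;	/* TRUTH_ANDIF_EXPR: q evaluated only if p != 0 */
#endif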
8265 case TRUTH_AND_EXPR
:
8267 this_optab
= and_optab
;
8272 this_optab
= ior_optab
;
8275 case TRUTH_XOR_EXPR
:
8277 this_optab
= xor_optab
;
8284 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8286 if (modifier
== EXPAND_STACK_PARM
)
8288 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8289 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
8300 case UNORDERED_EXPR
:
8307 temp
= do_store_flag (exp
,
8308 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8309 tmode
!= VOIDmode
? tmode
: mode
, 0);
8313 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8314 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8316 && GET_CODE (original_target
) == REG
8317 && (GET_MODE (original_target
)
8318 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8320 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8323 /* If temp is constant, we can just compute the result. */
8324 if (GET_CODE (temp
) == CONST_INT
)
8326 if (INTVAL (temp
) != 0)
8327 emit_move_insn (target
, const1_rtx
);
8329 emit_move_insn (target
, const0_rtx
);
8334 if (temp
!= original_target
)
8336 enum machine_mode mode1
= GET_MODE (temp
);
8337 if (mode1
== VOIDmode
)
8338 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
8340 temp
= copy_to_mode_reg (mode1
, temp
);
8343 op1
= gen_label_rtx ();
8344 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8345 GET_MODE (temp
), unsignedp
, op1
);
8346 emit_move_insn (temp
, const1_rtx
);
      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */
8355 case TRUTH_ANDIF_EXPR
:
8356 case TRUTH_ORIF_EXPR
:
8359 || modifier
== EXPAND_STACK_PARM
8360 || ! safe_from_p (target
, exp
, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
8363 || (!optimize
&& GET_CODE (target
) == REG
8364 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8365 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8368 emit_clr_insn (target
);
8370 op1
= gen_label_rtx ();
8371 jumpifnot (exp
, op1
);
8374 emit_0_to_1_insn (target
);
8377 return ignore
? const0_rtx
: target
;
8379 case TRUTH_NOT_EXPR
:
8380 if (modifier
== EXPAND_STACK_PARM
)
8382 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
8385 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8386 target
, 1, OPTAB_LIB_WIDEN
);
8392 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8394 return expand_expr_real (TREE_OPERAND (exp
, 1),
8395 (ignore
? const0_rtx
: target
),
8396 VOIDmode
, modifier
, alt_rtl
);
8398 case STATEMENT_LIST
:
8400 tree_stmt_iterator iter
;
8405 for (iter
= tsi_start (exp
); !tsi_end_p (iter
); tsi_next (&iter
))
8406 expand_expr (tsi_stmt (iter
), const0_rtx
, VOIDmode
, modifier
);
8411 /* If it's void, we don't need to worry about computing a value. */
8412 if (VOID_TYPE_P (TREE_TYPE (exp
)))
8414 tree pred
= TREE_OPERAND (exp
, 0);
8415 tree then_
= TREE_OPERAND (exp
, 1);
8416 tree else_
= TREE_OPERAND (exp
, 2);
	  /* If we do not have any pending cleanups or stack_levels
	     to restore, and at least one arm of the COND_EXPR is a
	     GOTO_EXPR to a local label, then we can emit more efficient
	     code by using jumpif/jumpifnot instead of the 'if' machinery.  */
8423 || containing_blocks_have_cleanups_or_stack_level ())
8425 else if (TREE_CODE (then_
) == GOTO_EXPR
8426 && TREE_CODE (GOTO_DESTINATION (then_
)) == LABEL_DECL
)
8428 jumpif (pred
, label_rtx (GOTO_DESTINATION (then_
)));
8429 return expand_expr (else_
, const0_rtx
, VOIDmode
, 0);
8431 else if (TREE_CODE (else_
) == GOTO_EXPR
8432 && TREE_CODE (GOTO_DESTINATION (else_
)) == LABEL_DECL
)
8434 jumpifnot (pred
, label_rtx (GOTO_DESTINATION (else_
)));
8435 return expand_expr (then_
, const0_rtx
, VOIDmode
, 0);
8438 /* Just use the 'if' machinery. */
8439 expand_start_cond (pred
, 0);
8440 start_cleanup_deferral ();
8441 expand_expr (then_
, const0_rtx
, VOIDmode
, 0);
8445 /* Iterate over 'else if's instead of recursing. */
8446 for (; TREE_CODE (exp
) == COND_EXPR
; exp
= TREE_OPERAND (exp
, 2))
8448 expand_start_else ();
8449 if (EXPR_HAS_LOCATION (exp
))
8451 emit_line_note (EXPR_LOCATION (exp
));
8452 if (cfun
->dont_emit_block_notes
)
8453 record_block_change (TREE_BLOCK (exp
));
8455 expand_elseif (TREE_OPERAND (exp
, 0));
8456 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, 0);
8458 /* Don't emit the jump and label if there's no 'else' clause. */
8459 if (TREE_SIDE_EFFECTS (exp
))
8461 expand_start_else ();
8462 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
8464 end_cleanup_deferral ();
    /* If we would have a "singleton" (see below) were it not for a
       conversion in each arm, bring that conversion back out.  */
8471 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8472 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8473 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8474 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8476 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8477 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8479 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8480 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8481 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8482 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8483 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8484 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8485 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8486 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8487 return expand_expr (build1 (NOP_EXPR
, type
,
8488 build (COND_EXPR
, TREE_TYPE (iftrue
),
8489 TREE_OPERAND (exp
, 0),
8491 target
, tmode
, modifier
);
    /* Note that COND_EXPRs whose type is a structure or union
       are required to be constructed to contain assignments of
       a temporary variable, so that we can evaluate them here
       for side effect only.  If type is void, we must do likewise.  */

    /* If an arm of the branch requires a cleanup,
       only that cleanup is performed.  */
8504 tree binary_op
= 0, unary_op
= 0;
      /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	 convert it to our mode, if necessary.  */
8508 if (integer_onep (TREE_OPERAND (exp
, 1))
8509 && integer_zerop (TREE_OPERAND (exp
, 2))
8510 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8514 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8519 if (modifier
== EXPAND_STACK_PARM
)
8521 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8522 if (GET_MODE (op0
) == mode
)
8526 target
= gen_reg_rtx (mode
);
8527 convert_move (target
, op0
, unsignedp
);
      /* Check for X ? A + B : A.  If we have this, we can copy A to the
	 output and conditionally add B.  Similarly for unary operations.
	 Don't do this if X has side-effects because those side effects
	 might affect A or B and the "?" operation is a sequence point in
	 ANSI.  (operand_equal_p tests for side effects.)  */
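      /* Illustrative sketch (editorial addition, not part of the original
	 source): the "singleton" case below covers conditionals such as
	 the one in this fragment, where the value A is copied to the
	 output unconditionally and B is added only when X holds.  */
#if 0
      int x, a, b;
      int r = x ? a + b : a;	/* copy a; conditionally add b */
#endif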
8537 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8538 && operand_equal_p (TREE_OPERAND (exp
, 2),
8539 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8540 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8541 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8542 && operand_equal_p (TREE_OPERAND (exp
, 1),
8543 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8544 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8545 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8546 && operand_equal_p (TREE_OPERAND (exp
, 2),
8547 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8548 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8549 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8550 && operand_equal_p (TREE_OPERAND (exp
, 1),
8551 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8552 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */
8561 else if (modifier
== EXPAND_STACK_PARM
)
8562 temp
= assign_temp (type
, 0, 0, 1);
8563 else if (original_target
8564 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8565 || (singleton
&& GET_CODE (original_target
) == REG
8566 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8567 && original_target
== var_rtx (singleton
)))
8568 && GET_MODE (original_target
) == mode
8569 #ifdef HAVE_conditional_move
8570 && (! can_conditionally_move_p (mode
)
8571 || GET_CODE (original_target
) == REG
8572 || TREE_ADDRESSABLE (type
))
8574 && (GET_CODE (original_target
) != MEM
8575 || TREE_ADDRESSABLE (type
)))
8576 temp
= original_target
;
8577 else if (TREE_ADDRESSABLE (type
))
8580 temp
= assign_temp (type
, 0, 0, 1);
      /* If we had X ? A + C : A, with C a constant power of 2, and we can
	 do the test of X as a store-flag operation, do this as
	 A + ((X != 0) << log C).  Similarly for other simple binary
	 operators.  Only do for C == 1 if BRANCH_COST is low.  */
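	  /* Illustrative sketch (editorial addition, not part of the
	     original source): with C a power of 2 the conditional add can
	     be done branch-free using the 0-or-1 result of a store-flag
	     comparison.  */
#if 0
	  int x, a;
	  int r1 = x ? a + 4 : a;	  /* becomes a + ((x != 0) << 2) */
	  int r2 = a + ((x != 0) << 2);   /* equivalent branch-free form */
#endif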
8586 if (temp
&& singleton
&& binary_op
8587 && (TREE_CODE (binary_op
) == PLUS_EXPR
8588 || TREE_CODE (binary_op
) == MINUS_EXPR
8589 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8590 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8591 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8592 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8593 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8597 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8598 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8599 ? addv_optab
: add_optab
)
8600 : TREE_CODE (binary_op
) == MINUS_EXPR
8601 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8602 ? subv_optab
: sub_optab
)
8603 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8606 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8607 if (singleton
== TREE_OPERAND (exp
, 1))
8608 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8610 cond
= TREE_OPERAND (exp
, 0);
8612 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8614 mode
, BRANCH_COST
<= 1);
8616 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8617 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8618 build_int_2 (tree_log2
8622 (safe_from_p (temp
, singleton
, 1)
8623 ? temp
: NULL_RTX
), 0);
8627 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8628 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8629 unsignedp
, OPTAB_LIB_WIDEN
);
8633 do_pending_stack_adjust ();
8635 op0
= gen_label_rtx ();
8637 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
	  /* If the target conflicts with the other operand of the
	     binary op, we can't use it.  Also, we can't use the target
	     if it is a hard register, because evaluating the condition
	     might clobber it.  */
8646 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8647 || (GET_CODE (temp
) == REG
8648 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8649 temp
= gen_reg_rtx (mode
);
8650 store_expr (singleton
, temp
,
8651 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8654 expand_expr (singleton
,
8655 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8656 if (singleton
== TREE_OPERAND (exp
, 1))
8657 jumpif (TREE_OPERAND (exp
, 0), op0
);
8659 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8661 start_cleanup_deferral ();
8662 if (binary_op
&& temp
== 0)
8663 /* Just touch the other operand. */
8664 expand_expr (TREE_OPERAND (binary_op
, 1),
8665 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8667 store_expr (build (TREE_CODE (binary_op
), type
,
8668 make_tree (type
, temp
),
8669 TREE_OPERAND (binary_op
, 1)),
8670 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8672 store_expr (build1 (TREE_CODE (unary_op
), type
,
8673 make_tree (type
, temp
)),
8674 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
      /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	 comparison operator.  If we have one of these cases, set the
	 output to A, branch on A (cse will merge these two references),
	 then set the output to FOO.  */
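      /* Illustrative sketch (editorial addition, not part of the original
	 source): an example of the A op 0 ? A : FOO shape handled here;
	 A is stored to the output first, the branch tests that same
	 value, and FOO overwrites it only on the other arm.  */
#if 0
      int a, foo;
      int r = (a != 0) ? a : foo;
#endif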
8682 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8683 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8684 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8685 TREE_OPERAND (exp
, 1), 0)
8686 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8687 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8688 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8690 if (GET_CODE (temp
) == REG
8691 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8692 temp
= gen_reg_rtx (mode
);
8693 store_expr (TREE_OPERAND (exp
, 1), temp
,
8694 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8695 jumpif (TREE_OPERAND (exp
, 0), op0
);
8697 start_cleanup_deferral ();
8698 if (TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8699 store_expr (TREE_OPERAND (exp
, 2), temp
,
8700 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8702 expand_expr (TREE_OPERAND (exp
, 2),
8703 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8707 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8708 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8709 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8710 TREE_OPERAND (exp
, 2), 0)
8711 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8712 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8713 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8715 if (GET_CODE (temp
) == REG
8716 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8717 temp
= gen_reg_rtx (mode
);
8718 store_expr (TREE_OPERAND (exp
, 2), temp
,
8719 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8720 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8722 start_cleanup_deferral ();
8723 if (TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8724 store_expr (TREE_OPERAND (exp
, 1), temp
,
8725 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8727 expand_expr (TREE_OPERAND (exp
, 1),
8728 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8733 op1
= gen_label_rtx ();
8734 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8736 start_cleanup_deferral ();
8738 /* One branch of the cond can be void, if it never returns. For
8739 example A ? throw : E */
8741 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8742 store_expr (TREE_OPERAND (exp
, 1), temp
,
8743 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8745 expand_expr (TREE_OPERAND (exp
, 1),
8746 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8747 end_cleanup_deferral ();
8749 emit_jump_insn (gen_jump (op1
));
8752 start_cleanup_deferral ();
8754 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8755 store_expr (TREE_OPERAND (exp
, 2), temp
,
8756 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8758 expand_expr (TREE_OPERAND (exp
, 2),
8759 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8762 end_cleanup_deferral ();
      /* Something needs to be initialized, but we didn't know
	 where that thing was when building the tree.  For example,
	 it could be the return value of a function, or a parameter
	 to a function which lays down in the stack, or a temporary
	 variable which must be passed by reference.

	 We guarantee that the expression will either be constructed
	 or copied into our original target.  */
8782 tree slot
= TREE_OPERAND (exp
, 0);
8783 tree cleanups
= NULL_TREE
;
8786 if (TREE_CODE (slot
) != VAR_DECL
)
8790 target
= original_target
;
	  /* Set this here so that if we get a target that refers to a
	     register variable that's already been used, put_reg_into_stack
	     knows that it should fix up those uses.  */
8795 TREE_USED (slot
) = 1;
8799 if (DECL_RTL_SET_P (slot
))
8801 target
= DECL_RTL (slot
);
	      /* If we have already expanded the slot, don't do
		 it again.  */
8804 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8809 target
= assign_temp (type
, 2, 0, 1);
8810 SET_DECL_RTL (slot
, target
);
8811 if (TREE_ADDRESSABLE (slot
))
8812 put_var_into_stack (slot
, /*rescan=*/false);
	      /* Since SLOT is not known to the called function
		 to belong to its stack frame, we must build an explicit
		 cleanup.  This case occurs when we must build up a reference
		 to pass the reference as an argument.  In this case,
		 it is very likely that such a reference need not be
		 built here.  */
8821 if (TREE_OPERAND (exp
, 2) == 0)
8822 TREE_OPERAND (exp
, 2)
8823 = lang_hooks
.maybe_build_cleanup (slot
);
8824 cleanups
= TREE_OPERAND (exp
, 2);
	  /* This case does occur, when expanding a parameter which
	     needs to be constructed on the stack.  The target
	     is the actual stack address that we want to initialize.
	     The function we call will perform the cleanup in this case.  */

	  /* If we have already assigned it space, use that space,
	     not the target that we were passed in, as our target
	     parameter is only a hint.  */
8837 if (DECL_RTL_SET_P (slot
))
8839 target
= DECL_RTL (slot
);
	      /* If we have already expanded the slot, don't do
		 it again.  */
8842 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8847 SET_DECL_RTL (slot
, target
);
	  /* If we must have an addressable slot, then make sure that
	     the RTL that we just stored in slot is OK.  */
8850 if (TREE_ADDRESSABLE (slot
))
8851 put_var_into_stack (slot
, /*rescan=*/true);
8855 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8856 /* Mark it as expanded. */
8857 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8859 store_expr (exp1
, target
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8861 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8868 tree lhs
= TREE_OPERAND (exp
, 0);
8869 tree rhs
= TREE_OPERAND (exp
, 1);
8871 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy and that prevents a partial-subsumption
	   which makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */
8885 tree lhs
= TREE_OPERAND (exp
, 0);
8886 tree rhs
= TREE_OPERAND (exp
, 1);
	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
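	/* Illustrative sketch (editorial addition, not part of the
	   original source): the single-bit |= / &= pattern meant here,
	   which can be done as a test of the source bit followed by a
	   conditional store of 0 or 1 into the destination bit.  */
#if 0
	struct s { unsigned a : 1; unsigned b : 1; } x;
	x.a |= x.b;	/* can become: if (x.b) x.a = 1; */
#endif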
8899 && TREE_CODE (lhs
) == COMPONENT_REF
8900 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8901 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8902 && TREE_OPERAND (rhs
, 0) == lhs
8903 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8904 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8905 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8907 rtx label
= gen_label_rtx ();
8909 do_jump (TREE_OPERAND (rhs
, 1),
8910 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8911 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8912 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8913 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8915 : integer_zero_node
)),
8917 do_pending_stack_adjust ();
8922 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8928 if (!TREE_OPERAND (exp
, 0))
8929 expand_null_return ();
8931 expand_return (TREE_OPERAND (exp
, 0));
8934 case PREINCREMENT_EXPR
:
8935 case PREDECREMENT_EXPR
:
8936 return expand_increment (exp
, 0, ignore
);
8938 case POSTINCREMENT_EXPR
:
8939 case POSTDECREMENT_EXPR
:
8940 /* Faster to treat as pre-increment if result is not used. */
8941 return expand_increment (exp
, ! ignore
, ignore
);
8944 if (modifier
== EXPAND_STACK_PARM
)
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
8948 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
      /* If we are taking the address of a constant and are at the
	 top level, we have to use output_constant_def since we can't
	 call force_const_mem at top level.  */
8954 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8955 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8957 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
8962 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8963 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8964 (modifier
== EXPAND_INITIALIZER
8965 ? modifier
: EXPAND_CONST_ADDRESS
));
      /* If we are going to ignore the result, OP0 will have been set
	 to const0_rtx, so just return it.  Don't get confused and
	 think we are taking the address of the constant.  */
      /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	 clever and return a REG when given a MEM.  */
8975 op0
= protect_from_queue (op0
, 1);
      /* We would like the object in memory.  If it is a constant, we can
	 have it be statically allocated into memory.  For a non-constant,
	 we need to allocate some memory and store the value into it.  */
8981 if (CONSTANT_P (op0
))
8982 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8984 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8985 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8986 || GET_CODE (op0
) == PARALLEL
|| GET_CODE (op0
) == LO_SUM
)
	  /* If the operand is a SAVE_EXPR, we can deal with this by
	     forcing the SAVE_EXPR into memory.  */
8990 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
8992 put_var_into_stack (TREE_OPERAND (exp
, 0),
8994 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
8998 /* If this object is in a register, it can't be BLKmode. */
8999 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9000 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
9002 if (GET_CODE (op0
) == PARALLEL
)
		/* Handle calls that pass values in multiple
		   non-contiguous locations.  The Irix 6 ABI has examples
		   of this.  */
9006 emit_group_store (memloc
, op0
, inner_type
,
9007 int_size_in_bytes (inner_type
));
9009 emit_move_insn (memloc
, op0
);
9015 if (GET_CODE (op0
) != MEM
)
9018 mark_temp_addr_taken (op0
);
9019 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
9021 op0
= XEXP (op0
, 0);
9022 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
9023 op0
= convert_memory_address (ptr_mode
, op0
);
      /* If OP0 is not aligned at least as much as the type requires, we
	 need to make a temporary, copy OP0 to it, and take the address of
	 the temporary.  We want to use the alignment of the type, not of
	 the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	 the test for BLKmode means that can't happen.  The test for
	 BLKmode is because we never make mis-aligned MEMs with
	 non-BLKmode.

	 We don't need to do this at all if the machine doesn't have
	 strict alignment.  */
9037 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
9038 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
9040 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
9042 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9045 if (TYPE_ALIGN_OK (inner_type
))
9048 if (TREE_ADDRESSABLE (inner_type
))
9050 /* We can't make a bitwise copy of this object, so fail. */
9051 error ("cannot take the address of an unaligned member");
9055 new = assign_stack_temp_for_type
9056 (TYPE_MODE (inner_type
),
9057 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
9058 : int_size_in_bytes (inner_type
),
9059 1, build_qualified_type (inner_type
,
9060 (TYPE_QUALS (inner_type
)
9061 | TYPE_QUAL_CONST
)));
9063 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
9064 (modifier
== EXPAND_STACK_PARM
9065 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
9070 op0
= force_operand (XEXP (op0
, 0), target
);
9074 && GET_CODE (op0
) != REG
9075 && modifier
!= EXPAND_CONST_ADDRESS
9076 && modifier
!= EXPAND_INITIALIZER
9077 && modifier
!= EXPAND_SUM
)
9078 op0
= force_reg (Pmode
, op0
);
9080 if (GET_CODE (op0
) == REG
9081 && ! REG_USERVAR_P (op0
))
9082 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
9084 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
9085 op0
= convert_memory_address (ptr_mode
, op0
);
9089 case ENTRY_VALUE_EXPR
:
9092 /* COMPLEX type for Extended Pascal & Fortran */
9095 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9098 /* Get the rtx code of the operands. */
9099 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9100 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
9103 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
9107 /* Move the real (op0) and imaginary (op1) parts to their location. */
9108 emit_move_insn (gen_realpart (mode
, target
), op0
);
9109 emit_move_insn (gen_imagpart (mode
, target
), op1
);
9111 insns
= get_insns ();
	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
9118 if (GET_CODE (target
) != CONCAT
)
9119 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
9127 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9128 return gen_realpart (mode
, op0
);
9131 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9132 return gen_imagpart (mode
, op0
);
9136 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9140 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9143 target
= gen_reg_rtx (mode
);
9147 /* Store the realpart and the negated imagpart to target. */
9148 emit_move_insn (gen_realpart (partmode
, target
),
9149 gen_realpart (partmode
, op0
));
9151 imag_t
= gen_imagpart (partmode
, target
);
9152 temp
= expand_unop (partmode
,
9153 ! unsignedp
&& flag_trapv
9154 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
9155 ? negv_optab
: neg_optab
,
9156 gen_imagpart (partmode
, op0
), imag_t
, 0);
9158 emit_move_insn (imag_t
, temp
);
9160 insns
= get_insns ();
	/* Conjugate should appear as a single unit
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
9167 if (GET_CODE (target
) != CONCAT
)
9168 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
9176 expand_resx_expr (exp
);
9179 case TRY_CATCH_EXPR
:
9181 tree handler
= TREE_OPERAND (exp
, 1);
9183 expand_eh_region_start ();
9184 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9185 expand_eh_handler (handler
);
9191 expand_start_catch (CATCH_TYPES (exp
));
9192 expand_expr (CATCH_BODY (exp
), const0_rtx
, VOIDmode
, 0);
9193 expand_end_catch ();
9196 case EH_FILTER_EXPR
:
9197 /* Should have been handled in expand_eh_handler. */
9200 case TRY_FINALLY_EXPR
:
9202 tree try_block
= TREE_OPERAND (exp
, 0);
9203 tree finally_block
= TREE_OPERAND (exp
, 1);
9205 if ((!optimize
&& lang_protect_cleanup_actions
== NULL
)
9206 || unsafe_for_reeval (finally_block
) > 1)
	    /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
	       is not sufficient, so we cannot expand the block twice.
	       So we play games with GOTO_SUBROUTINE_EXPR to let us
	       expand the thing only once.  */
	    /* When not optimizing, we go ahead with this form since
	       (1) user breakpoints operate more predictably without
		   code duplication, and
	       (2) we're not running any of the global optimizers
		   that would explode in time/space with the highly
		   connected CFG created by the indirect branching.  */
9219 rtx finally_label
= gen_label_rtx ();
9220 rtx done_label
= gen_label_rtx ();
9221 rtx return_link
= gen_reg_rtx (Pmode
);
9222 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
9223 (tree
) finally_label
, (tree
) return_link
);
9224 TREE_SIDE_EFFECTS (cleanup
) = 1;
9226 /* Start a new binding layer that will keep track of all cleanup
9227 actions to be performed. */
9228 expand_start_bindings (2);
9229 target_temp_slot_level
= temp_slot_level
;
9231 expand_decl_cleanup (NULL_TREE
, cleanup
);
9232 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9234 preserve_temp_slots (op0
);
9235 expand_end_bindings (NULL_TREE
, 0, 0);
9236 emit_jump (done_label
);
9237 emit_label (finally_label
);
9238 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
9239 emit_indirect_jump (return_link
);
9240 emit_label (done_label
);
9244 expand_start_bindings (2);
9245 target_temp_slot_level
= temp_slot_level
;
9247 expand_decl_cleanup (NULL_TREE
, finally_block
);
9248 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9250 preserve_temp_slots (op0
);
9251 expand_end_bindings (NULL_TREE
, 0, 0);
9257 case GOTO_SUBROUTINE_EXPR
:
9259 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9260 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9261 rtx return_address
= gen_label_rtx ();
9262 emit_move_insn (return_link
,
9263 gen_rtx_LABEL_REF (Pmode
, return_address
));
9265 emit_label (return_address
);
9270 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9273 return get_exception_pointer (cfun
);
9276 return get_exception_filter (cfun
);
      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */
9284 expand_start_case (0, SWITCH_COND (exp
), integer_type_node
,
9286 if (SWITCH_BODY (exp
))
9287 expand_expr_stmt (SWITCH_BODY (exp
));
9288 if (SWITCH_LABELS (exp
))
9291 tree vec
= SWITCH_LABELS (exp
);
9292 size_t i
, n
= TREE_VEC_LENGTH (vec
);
9294 for (i
= 0; i
< n
; ++i
)
9296 tree elt
= TREE_VEC_ELT (vec
, i
);
9297 tree controlling_expr_type
= TREE_TYPE (SWITCH_COND (exp
));
9298 tree min_value
= TYPE_MIN_VALUE (controlling_expr_type
);
9299 tree max_value
= TYPE_MAX_VALUE (controlling_expr_type
);
9301 tree case_low
= CASE_LOW (elt
);
9302 tree case_high
= CASE_HIGH (elt
) ? CASE_HIGH (elt
) : case_low
;
9303 if (case_low
&& case_high
)
9305 /* Case label is less than minimum for type. */
9306 if ((tree_int_cst_compare (case_low
, min_value
) < 0)
9307 && (tree_int_cst_compare (case_high
, min_value
) < 0))
9309 warning ("case label value %d is less than minimum value for type",
9310 TREE_INT_CST (case_low
));
9314 /* Case value is greater than maximum for type. */
9315 if ((tree_int_cst_compare (case_low
, max_value
) > 0)
9316 && (tree_int_cst_compare (case_high
, max_value
) > 0))
9318 warning ("case label value %d exceeds maximum value for type",
9319 TREE_INT_CST (case_high
));
9323 /* Saturate lower case label value to minimum. */
9324 if ((tree_int_cst_compare (case_high
, min_value
) >= 0)
9325 && (tree_int_cst_compare (case_low
, min_value
) < 0))
9327 warning ("lower value %d in case label range less than minimum value for type",
9328 TREE_INT_CST (case_low
));
9329 case_low
= min_value
;
9332 /* Saturate upper case label value to maximum. */
9333 if ((tree_int_cst_compare (case_low
, max_value
) <= 0)
9334 && (tree_int_cst_compare (case_high
, max_value
) > 0))
9336 warning ("upper value %d in case label range exceeds maximum value for type",
9337 TREE_INT_CST (case_high
));
9338 case_high
= max_value
;
9342 add_case_node (case_low
, case_high
, CASE_LABEL (elt
), &duplicate
, true);
9347 expand_end_case_type (SWITCH_COND (exp
), TREE_TYPE (exp
));
9351 expand_label (TREE_OPERAND (exp
, 0));
9354 case CASE_LABEL_EXPR
:
9357 add_case_node (CASE_LOW (exp
), CASE_HIGH (exp
), CASE_LABEL (exp
),
9365 expand_asm_expr (exp
);
9369 return lang_hooks
.expand_expr (exp
, original_target
, tmode
,
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
9376 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
9377 subtarget
, &op0
, &op1
, 0);
9379 if (modifier
== EXPAND_STACK_PARM
)
9381 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9382 unsignedp
, OPTAB_LIB_WIDEN
);
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */
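/* Illustrative sketch (editorial addition, not part of the original
   source): the offset shape recognized below is the usual over-aligning
   idiom, a masked negation of the object's own address, as in this
   hypothetical fragment (ALIGN is a power of 2 above BIGGEST_ALIGNMENT).  */
#if 0
  char buf[ALIGN * 2];
  char *p = buf + (-(size_t) buf & (ALIGN - 1));   /* p is ALIGN-aligned */
#endif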
9393 is_aligning_offset (tree offset
, tree exp
)
9395 /* Strip off any conversions. */
9396 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9397 || TREE_CODE (offset
) == NOP_EXPR
9398 || TREE_CODE (offset
) == CONVERT_EXPR
)
9399 offset
= TREE_OPERAND (offset
, 0);
  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9403 if (TREE_CODE (offset
) != BIT_AND_EXPR
9404 || !host_integerp (TREE_OPERAND (offset
, 1), 1)
9405 || compare_tree_int (TREE_OPERAND (offset
, 1),
9406 BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
) <= 0
9407 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset
, 1), 1) + 1) < 0)
9410 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9411 It must be NEGATE_EXPR. Then strip any more conversions. */
9412 offset
= TREE_OPERAND (offset
, 0);
9413 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9414 || TREE_CODE (offset
) == NOP_EXPR
9415 || TREE_CODE (offset
) == CONVERT_EXPR
)
9416 offset
= TREE_OPERAND (offset
, 0);
9418 if (TREE_CODE (offset
) != NEGATE_EXPR
)
9421 offset
= TREE_OPERAND (offset
, 0);
9422 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9423 || TREE_CODE (offset
) == NOP_EXPR
9424 || TREE_CODE (offset
) == CONVERT_EXPR
)
9425 offset
= TREE_OPERAND (offset
, 0);
9427 /* This must now be the address of EXP. */
9428 return TREE_CODE (offset
) == ADDR_EXPR
&& TREE_OPERAND (offset
, 0) == exp
;
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
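/* Illustrative sketch (editorial addition, not part of the original
   source): for an argument like the one below, this routine would
   return the STRING_CST for "hello" and set *PTR_OFFSET to 2.  */
#if 0
  const char *p = &"hello"[2];
#endif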
9437 string_constant (tree arg
, tree
*ptr_offset
)
9441 if (TREE_CODE (arg
) == ADDR_EXPR
9442 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
9444 *ptr_offset
= size_zero_node
;
9445 return TREE_OPERAND (arg
, 0);
9447 if (TREE_CODE (arg
) == ADDR_EXPR
9448 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
9449 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg
, 0), 0)) == STRING_CST
)
9451 *ptr_offset
= convert (sizetype
, TREE_OPERAND (TREE_OPERAND (arg
, 0), 1));
9452 return TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
9454 else if (TREE_CODE (arg
) == PLUS_EXPR
)
9456 tree arg0
= TREE_OPERAND (arg
, 0);
9457 tree arg1
= TREE_OPERAND (arg
, 1);
9462 if (TREE_CODE (arg0
) == ADDR_EXPR
9463 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
9465 *ptr_offset
= convert (sizetype
, arg1
);
9466 return TREE_OPERAND (arg0
, 0);
9468 else if (TREE_CODE (arg1
) == ADDR_EXPR
9469 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
9471 *ptr_offset
= convert (sizetype
, arg0
);
9472 return TREE_OPERAND (arg1
, 0);
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
9484 expand_increment (tree exp
, int post
, int ignore
)
9488 tree incremented
= TREE_OPERAND (exp
, 0);
9489 optab this_optab
= add_optab
;
9491 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9492 int op0_is_copy
= 0;
9493 int single_insn
= 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
9499 /* Stabilize any component ref that might need to be
9500 evaluated more than once below. */
9502 || TREE_CODE (incremented
) == BIT_FIELD_REF
9503 || (TREE_CODE (incremented
) == COMPONENT_REF
9504 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9505 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9506 incremented
= stabilize_reference (incremented
);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
9510 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9511 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9512 incremented
= save_expr (incremented
);
  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */
9519 temp
= get_last_insn ();
9520 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
9530 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9533 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9537 else if (GET_CODE (op0
) == SUBREG
9538 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
9544 op0
= copy_to_reg (op0
);
9549 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9550 && temp
!= get_last_insn ());
9551 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9553 /* Decide whether incrementing or decrementing. */
9554 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9555 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9556 this_optab
= sub_optab
;
9558 /* Convert decrement by a constant into a negative increment. */
9559 if (this_optab
== sub_optab
9560 && GET_CODE (op1
) == CONST_INT
)
9562 op1
= GEN_INT (-INTVAL (op1
));
9563 this_optab
= add_optab
;
9566 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9567 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9569 /* For a preincrement, see if we can do this with a single instruction. */
9572 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9573 if (icode
!= (int) CODE_FOR_nothing
9574 /* Make sure that OP0 is valid for operands 0 and 1
9575 of the insn we want to queue. */
9576 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9577 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9578 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
9590 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
9598 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9599 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9600 ? MINUS_EXPR
: PLUS_EXPR
),
9603 TREE_OPERAND (exp
, 1));
9605 while (TREE_CODE (incremented
) == NOP_EXPR
9606 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9608 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9609 incremented
= TREE_OPERAND (incremented
, 0);
9612 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
);
9613 return post
? op0
: temp
;
  /* We have a true reference to the value in OP0.
     If there is an insn to add or subtract in this mode, queue it.
     Queuing the increment insn avoids the register shuffling
     that often results if we must increment now and first save
     the old value for subsequent use.  */
9624 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9625 op0
= stabilize (op0
);
9628 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9629 if (icode
!= (int) CODE_FOR_nothing
9630 /* Make sure that OP0 is valid for operands 0 and 1
9631 of the insn we want to queue. */
9632 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9633 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9635 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9636 op1
= force_reg (mode
, op1
);
9638 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9640 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9642 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9643 ? force_reg (Pmode
, XEXP (op0
, 0))
9644 : copy_to_reg (XEXP (op0
, 0)));
9647 op0
= replace_equiv_address (op0
, addr
);
9648 temp
= force_reg (GET_MODE (op0
), op0
);
9649 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9650 op1
= force_reg (mode
, op1
);
9652 /* The increment queue is LIFO, thus we have to `queue'
9653 the instructions in reverse order. */
9654 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9655 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9660 /* Preincrement, or we can't increment with one simple insn. */
9662 /* Save a copy of the value before inc or dec, to return it later. */
9663 temp
= value
= copy_to_reg (op0
);
9665 /* Arrange to return the incremented value. */
9666 /* Copy the rtx because expand_binop will protect from the queue,
9667 and the results of that would be invalid for us to return
9668 if our caller does emit_queue before using our result. */
9669 temp
= copy_rtx (value
= op0
);
9671 /* Increment however we can. */
9672 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9673 TYPE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9675 /* Make sure the value is stored into OP0. */
9677 emit_move_insn (op0
, op1
);
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
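/* Illustrative sketch (editorial addition, not part of the original
   source): the kind of expression handled here, and the branch-free
   form a store-flag (scc) instruction makes possible.  */
#if 0
  int a, b;
  int flag = (a < b);	/* one setcc insn if available; otherwise a
			   set/compare/jump/set sequence is emitted */
#endif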
9703 do_store_flag (tree exp
, rtx target
, enum machine_mode mode
, int only_cheap
)
9706 tree arg0
, arg1
, type
;
9708 enum machine_mode operand_mode
;
9712 enum insn_code icode
;
9713 rtx subtarget
= target
;
  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */
9721 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
9722 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
9724 arg0
= TREE_OPERAND (exp
, 0);
9725 arg1
= TREE_OPERAND (exp
, 1);
9727 /* Don't crash if the comparison was erroneous. */
9728 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
9731 type
= TREE_TYPE (arg0
);
9732 operand_mode
= TYPE_MODE (type
);
9733 unsignedp
= TYPE_UNSIGNED (type
);
9735 /* We won't bother with BLKmode store-flag operations because it would mean
9736 passing a lot of information to emit_store_flag. */
9737 if (operand_mode
== BLKmode
)
9740 /* We won't bother with store-flag operations involving function pointers
9741 when function pointers must be canonicalized before comparisons. */
9742 #ifdef HAVE_canonicalize_funcptr_for_compare
9743 if (HAVE_canonicalize_funcptr_for_compare
9744 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9745 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9747 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9748 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9749 == FUNCTION_TYPE
))))
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
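  /* For example, (x & 8) != 0 is expanded as (x >> 3) & 1, and
     (x & 8) == 0 as ((x >> 3) & 1) ^ 1, with no scc instruction.  */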
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
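  /* Roughly:  target = 1;  if (op0 <code> op1) goto L;  target = 0;  L:
     (with the two constants swapped when INVERT is set).  */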
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
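/* In other words, when a casesi pattern does the bounds check for us a
   dispatch table is considered already at 4 case labels; without one the
   explicit compare-and-branch raises the break-even point to 5.  */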
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (lang_hooks.types.type_for_size
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
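  /* Each casesi operand below is checked against the pattern's predicate;
     an operand the predicate rejects is copied into a fresh register of
     the mode the pattern expects before the insn is emitted.  */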
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
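  /* For a switch over case values 5..12, for example, INDEX already holds
     the original value minus 5 and RANGE is 7; an original value below 5
     wraps around to a huge unsigned number, so the single GTU test below
     catches both out-of-range directions.  */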
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
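/* For example, V2DImode can be reported valid on a target with no native
   V2DI support, as long as plain DImode moves exist to emulate it with.  */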
int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"