1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
45 #include "langhooks.h"
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #ifndef PUSH_ARGS_REVERSED
58 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59 #define PUSH_ARGS_REVERSED /* If it's last to first. */
65 #ifndef STACK_PUSH_CODE
66 #ifdef STACK_GROWS_DOWNWARD
67 #define STACK_PUSH_CODE PRE_DEC
69 #define STACK_PUSH_CODE PRE_INC
73 /* Assume that case vectors are not pc-relative. */
74 #ifndef CASE_VECTOR_PC_RELATIVE
75 #define CASE_VECTOR_PC_RELATIVE 0
78 /* Convert defined/undefined to boolean. */
79 #ifdef TARGET_MEM_FUNCTIONS
80 #undef TARGET_MEM_FUNCTIONS
81 #define TARGET_MEM_FUNCTIONS 1
83 #define TARGET_MEM_FUNCTIONS 0
87 /* If this is nonzero, we do not bother generating VOLATILE
88 around volatile memory references, and we are willing to
89 output indirect addresses. If cse is to follow, we reject
90 indirect addresses so a useful potential cse is generated;
91 if it is used only once, instruction combination will produce
92 the same indirect address eventually. */
95 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
96 static tree placeholder_list
= 0;
98 /* This structure is used by move_by_pieces to describe the move to
100 struct move_by_pieces
109 int explicit_inc_from
;
110 unsigned HOST_WIDE_INT len
;
111 HOST_WIDE_INT offset
;
115 /* This structure is used by store_by_pieces to describe the clear to
118 struct store_by_pieces
124 unsigned HOST_WIDE_INT len
;
125 HOST_WIDE_INT offset
;
126 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
131 static rtx enqueue_insn
PARAMS ((rtx
, rtx
));
132 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
133 PARAMS ((unsigned HOST_WIDE_INT
,
135 static void move_by_pieces_1
PARAMS ((rtx (*) (rtx
, ...), enum machine_mode
,
136 struct move_by_pieces
*));
137 static bool block_move_libcall_safe_for_call_parm
PARAMS ((void));
138 static bool emit_block_move_via_movstr
PARAMS ((rtx
, rtx
, rtx
, unsigned));
139 static rtx emit_block_move_via_libcall
PARAMS ((rtx
, rtx
, rtx
));
140 static tree emit_block_move_libcall_fn
PARAMS ((int));
141 static void emit_block_move_via_loop
PARAMS ((rtx
, rtx
, rtx
, unsigned));
142 static rtx clear_by_pieces_1
PARAMS ((PTR
, HOST_WIDE_INT
,
144 static void clear_by_pieces
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
146 static void store_by_pieces_1
PARAMS ((struct store_by_pieces
*,
148 static void store_by_pieces_2
PARAMS ((rtx (*) (rtx
, ...),
150 struct store_by_pieces
*));
151 static bool clear_storage_via_clrstr
PARAMS ((rtx
, rtx
, unsigned));
152 static rtx clear_storage_via_libcall
PARAMS ((rtx
, rtx
));
153 static tree clear_storage_libcall_fn
PARAMS ((int));
154 static rtx compress_float_constant
PARAMS ((rtx
, rtx
));
155 static rtx get_subtarget
PARAMS ((rtx
));
156 static int is_zeros_p
PARAMS ((tree
));
157 static int mostly_zeros_p
PARAMS ((tree
));
158 static void store_constructor_field
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
159 HOST_WIDE_INT
, enum machine_mode
,
160 tree
, tree
, int, int));
161 static void store_constructor
PARAMS ((tree
, rtx
, int, HOST_WIDE_INT
));
162 static rtx store_field
PARAMS ((rtx
, HOST_WIDE_INT
,
163 HOST_WIDE_INT
, enum machine_mode
,
164 tree
, enum machine_mode
, int, tree
,
166 static rtx var_rtx
PARAMS ((tree
));
167 static HOST_WIDE_INT highest_pow2_factor
PARAMS ((tree
));
168 static HOST_WIDE_INT highest_pow2_factor_for_type
PARAMS ((tree
, tree
));
169 static int is_aligning_offset
PARAMS ((tree
, tree
));
170 static rtx expand_increment
PARAMS ((tree
, int, int));
171 static void do_jump_by_parts_greater
PARAMS ((tree
, int, rtx
, rtx
));
172 static void do_jump_by_parts_equality
PARAMS ((tree
, rtx
, rtx
));
173 static void do_compare_and_jump
PARAMS ((tree
, enum rtx_code
, enum rtx_code
,
175 static rtx do_store_flag
PARAMS ((tree
, rtx
, enum machine_mode
, int));
177 static void emit_single_push_insn
PARAMS ((enum machine_mode
, rtx
, tree
));
179 static void do_tablejump
PARAMS ((rtx
, enum machine_mode
, rtx
, rtx
, rtx
));
181 /* Record for each mode whether we can move a register directly to or
182 from an object of that mode in memory. If we can't, we won't try
183 to use that mode directly when accessing a field of that mode. */
185 static char direct_load
[NUM_MACHINE_MODES
];
186 static char direct_store
[NUM_MACHINE_MODES
];
188 /* Record for each mode whether we can float-extend from memory. */
190 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
192 /* If a memory-to-memory move would take MOVE_RATIO or more simple
193 move-instruction sequences, we will do a movstr or libcall instead. */
196 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 /* If we are optimizing for space (-Os), cut down the default move ratio. */
200 #define MOVE_RATIO (optimize_size ? 3 : 15)
204 /* This macro is used to determine whether move_by_pieces should be called
205 to perform a structure copy. */
206 #ifndef MOVE_BY_PIECES_P
207 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
208 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
211 /* If a clear memory operation would take CLEAR_RATIO or more simple
212 move-instruction sequences, we will do a clrstr or libcall instead. */
215 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
216 #define CLEAR_RATIO 2
218 /* If we are optimizing for space, cut down the default clear ratio. */
219 #define CLEAR_RATIO (optimize_size ? 3 : 15)
223 /* This macro is used to determine whether clear_by_pieces should be
224 called to clear storage. */
225 #ifndef CLEAR_BY_PIECES_P
226 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
227 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
230 /* This array records the insn_code of insns to perform block moves. */
231 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
233 /* This array records the insn_code of insns to perform block clears. */
234 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
236 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
238 #ifndef SLOW_UNALIGNED_ACCESS
239 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
242 /* This is run once per compilation to set up which modes can be used
243 directly in memory and to initialize the block move optab. */
249 enum machine_mode mode
;
254 /* Try indexing by frame ptr and try by stack ptr.
255 It is known that on the Convex the stack ptr isn't a valid index.
256 With luck, one or the other is valid on any machine. */
257 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
258 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
260 /* A scratch register we can modify in-place below to avoid
261 useless RTL allocations. */
262 reg
= gen_rtx_REG (VOIDmode
, -1);
264 insn
= rtx_alloc (INSN
);
265 pat
= gen_rtx_SET (0, NULL_RTX
, NULL_RTX
);
266 PATTERN (insn
) = pat
;
268 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
269 mode
= (enum machine_mode
) ((int) mode
+ 1))
273 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
274 PUT_MODE (mem
, mode
);
275 PUT_MODE (mem1
, mode
);
276 PUT_MODE (reg
, mode
);
278 /* See if there is some register that can be used in this mode and
279 directly loaded or stored from memory. */
281 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
282 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
283 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
286 if (! HARD_REGNO_MODE_OK (regno
, mode
))
292 SET_DEST (pat
) = reg
;
293 if (recog (pat
, insn
, &num_clobbers
) >= 0)
294 direct_load
[(int) mode
] = 1;
296 SET_SRC (pat
) = mem1
;
297 SET_DEST (pat
) = reg
;
298 if (recog (pat
, insn
, &num_clobbers
) >= 0)
299 direct_load
[(int) mode
] = 1;
302 SET_DEST (pat
) = mem
;
303 if (recog (pat
, insn
, &num_clobbers
) >= 0)
304 direct_store
[(int) mode
] = 1;
307 SET_DEST (pat
) = mem1
;
308 if (recog (pat
, insn
, &num_clobbers
) >= 0)
309 direct_store
[(int) mode
] = 1;
313 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
315 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
316 mode
= GET_MODE_WIDER_MODE (mode
))
318 enum machine_mode srcmode
;
319 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
320 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
324 ic
= can_extend_p (mode
, srcmode
, 0);
325 if (ic
== CODE_FOR_nothing
)
328 PUT_MODE (mem
, srcmode
);
330 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
331 float_extend_from_mem
[mode
][srcmode
] = true;
336 /* This is run at the start of compiling a function. */
341 cfun
->expr
= (struct expr_status
*) ggc_alloc (sizeof (struct expr_status
));
344 pending_stack_adjust
= 0;
345 stack_pointer_delta
= 0;
346 inhibit_defer_pop
= 0;
348 apply_args_value
= 0;
352 /* Small sanity check that the queue is empty at the end of a function. */
355 finish_expr_for_function ()
361 /* Manage the queue of increment instructions to be output
362 for POSTINCREMENT_EXPR expressions, etc. */
364 /* Queue up to increment (or change) VAR later. BODY says how:
365 BODY should be the same thing you would pass to emit_insn
366 to increment right away. It will go to emit_insn later on.
368 The value is a QUEUED expression to be used in place of VAR
369 where you want to guarantee the pre-incrementation value of VAR. */
372 enqueue_insn (var
, body
)
375 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
376 body
, pending_chain
);
377 return pending_chain
;
380 /* Use protect_from_queue to convert a QUEUED expression
381 into something that you can put immediately into an instruction.
382 If the queued incrementation has not happened yet,
383 protect_from_queue returns the variable itself.
384 If the incrementation has happened, protect_from_queue returns a temp
385 that contains a copy of the old value of the variable.
387 Any time an rtx which might possibly be a QUEUED is to be put
388 into an instruction, it must be passed through protect_from_queue first.
389 QUEUED expressions are not meaningful in instructions.
391 Do not pass a value through protect_from_queue and then hold
392 on to it for a while before putting it in an instruction!
393 If the queue is flushed in between, incorrect code will result. */
396 protect_from_queue (x
, modify
)
400 RTX_CODE code
= GET_CODE (x
);
402 #if 0 /* A QUEUED can hang around after the queue is forced out. */
403 /* Shortcut for most common case. */
404 if (pending_chain
== 0)
410 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
411 use of autoincrement. Make a copy of the contents of the memory
412 location rather than a copy of the address, but not if the value is
413 of mode BLKmode. Don't modify X in place since it might be
415 if (code
== MEM
&& GET_MODE (x
) != BLKmode
416 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
419 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
423 rtx temp
= gen_reg_rtx (GET_MODE (x
));
425 emit_insn_before (gen_move_insn (temp
, new),
430 /* Copy the address into a pseudo, so that the returned value
431 remains correct across calls to emit_queue. */
432 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
435 /* Otherwise, recursively protect the subexpressions of all
436 the kinds of rtx's that can contain a QUEUED. */
439 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
440 if (tem
!= XEXP (x
, 0))
446 else if (code
== PLUS
|| code
== MULT
)
448 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
449 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
450 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
459 /* If the increment has not happened, use the variable itself. Copy it
460 into a new pseudo so that the value remains correct across calls to
462 if (QUEUED_INSN (x
) == 0)
463 return copy_to_reg (QUEUED_VAR (x
));
464 /* If the increment has happened and a pre-increment copy exists,
466 if (QUEUED_COPY (x
) != 0)
467 return QUEUED_COPY (x
);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
473 return QUEUED_COPY (x
);
476 /* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
485 enum rtx_code code
= GET_CODE (x
);
491 return queued_subexp_p (XEXP (x
, 0));
495 return (queued_subexp_p (XEXP (x
, 0))
496 || queued_subexp_p (XEXP (x
, 1)));
502 /* Perform all the pending incrementations. */
508 while ((p
= pending_chain
))
510 rtx body
= QUEUED_BODY (p
);
512 switch (GET_CODE (body
))
520 QUEUED_INSN (p
) = body
;
524 #ifdef ENABLE_CHECKING
531 QUEUED_INSN (p
) = emit_insn (body
);
535 pending_chain
= QUEUED_NEXT (p
);
539 /* Copy data from FROM to TO, where the machine modes are not the same.
540 Both modes may be integer, or both may be floating.
541 UNSIGNEDP should be nonzero if FROM is an unsigned type.
542 This causes zero-extension instead of sign-extension. */
545 convert_move (to
, from
, unsignedp
)
549 enum machine_mode to_mode
= GET_MODE (to
);
550 enum machine_mode from_mode
= GET_MODE (from
);
551 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
552 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
556 /* rtx code for making an equivalent value. */
557 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
558 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
560 to
= protect_from_queue (to
, 1);
561 from
= protect_from_queue (from
, 0);
563 if (to_real
!= from_real
)
566 /* If FROM is a SUBREG that indicates that we have already done at least
567 the required extension, strip it. We don't handle such SUBREGs as
570 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
571 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
572 >= GET_MODE_SIZE (to_mode
))
573 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
574 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
576 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
579 if (to_mode
== from_mode
580 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
582 emit_move_insn (to
, from
);
586 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
588 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
591 if (VECTOR_MODE_P (to_mode
))
592 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
594 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
596 emit_move_insn (to
, from
);
600 if (to_real
!= from_real
)
607 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
609 /* Try converting directly if the insn is supported. */
610 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
613 emit_unop_insn (code
, to
, from
, UNKNOWN
);
618 #ifdef HAVE_trunchfqf2
619 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
621 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
625 #ifdef HAVE_trunctqfqf2
626 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
628 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
632 #ifdef HAVE_truncsfqf2
633 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
635 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
639 #ifdef HAVE_truncdfqf2
640 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
642 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
646 #ifdef HAVE_truncxfqf2
647 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
649 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
653 #ifdef HAVE_trunctfqf2
654 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
656 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
661 #ifdef HAVE_trunctqfhf2
662 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
664 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
668 #ifdef HAVE_truncsfhf2
669 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
671 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
675 #ifdef HAVE_truncdfhf2
676 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
678 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
682 #ifdef HAVE_truncxfhf2
683 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
685 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
689 #ifdef HAVE_trunctfhf2
690 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
692 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
697 #ifdef HAVE_truncsftqf2
698 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
700 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
704 #ifdef HAVE_truncdftqf2
705 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
707 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
711 #ifdef HAVE_truncxftqf2
712 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
714 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
718 #ifdef HAVE_trunctftqf2
719 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
721 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
726 #ifdef HAVE_truncdfsf2
727 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
729 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
733 #ifdef HAVE_truncxfsf2
734 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
736 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
740 #ifdef HAVE_trunctfsf2
741 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
743 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
747 #ifdef HAVE_truncxfdf2
748 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
750 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
754 #ifdef HAVE_trunctfdf2
755 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
757 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
769 libcall
= extendsfdf2_libfunc
;
773 libcall
= extendsfxf2_libfunc
;
777 libcall
= extendsftf2_libfunc
;
789 libcall
= truncdfsf2_libfunc
;
793 libcall
= extenddfxf2_libfunc
;
797 libcall
= extenddftf2_libfunc
;
809 libcall
= truncxfsf2_libfunc
;
813 libcall
= truncxfdf2_libfunc
;
825 libcall
= trunctfsf2_libfunc
;
829 libcall
= trunctfdf2_libfunc
;
841 if (libcall
== (rtx
) 0)
842 /* This conversion is not implemented yet. */
846 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
848 insns
= get_insns ();
850 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
855 /* Now both modes are integers. */
857 /* Handle expanding beyond a word. */
858 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
859 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
866 enum machine_mode lowpart_mode
;
867 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
869 /* Try converting directly if the insn is supported. */
870 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
873 /* If FROM is a SUBREG, put it into a register. Do this
874 so that we always generate the same set of insns for
875 better cse'ing; if an intermediate assignment occurred,
876 we won't be doing the operation directly on the SUBREG. */
877 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
878 from
= force_reg (from_mode
, from
);
879 emit_unop_insn (code
, to
, from
, equiv_code
);
882 /* Next, try converting via full word. */
883 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
884 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
885 != CODE_FOR_nothing
))
887 if (GET_CODE (to
) == REG
)
888 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
889 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
890 emit_unop_insn (code
, to
,
891 gen_lowpart (word_mode
, to
), equiv_code
);
895 /* No special multiword conversion insn; do it by hand. */
898 /* Since we will turn this into a no conflict block, we must ensure
899 that the source does not overlap the target. */
901 if (reg_overlap_mentioned_p (to
, from
))
902 from
= force_reg (from_mode
, from
);
904 /* Get a copy of FROM widened to a word, if necessary. */
905 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
906 lowpart_mode
= word_mode
;
908 lowpart_mode
= from_mode
;
910 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
912 lowpart
= gen_lowpart (lowpart_mode
, to
);
913 emit_move_insn (lowpart
, lowfrom
);
915 /* Compute the value to put in each remaining word. */
917 fill_value
= const0_rtx
;
922 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
923 && STORE_FLAG_VALUE
== -1)
925 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
927 fill_value
= gen_reg_rtx (word_mode
);
928 emit_insn (gen_slt (fill_value
));
934 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
935 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
937 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
941 /* Fill the remaining words. */
942 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
944 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
945 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
950 if (fill_value
!= subword
)
951 emit_move_insn (subword
, fill_value
);
954 insns
= get_insns ();
957 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
958 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
962 /* Truncating multi-word to a word or less. */
963 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
964 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
966 if (!((GET_CODE (from
) == MEM
967 && ! MEM_VOLATILE_P (from
)
968 && direct_load
[(int) to_mode
]
969 && ! mode_dependent_address_p (XEXP (from
, 0)))
970 || GET_CODE (from
) == REG
971 || GET_CODE (from
) == SUBREG
))
972 from
= force_reg (from_mode
, from
);
973 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
977 /* Handle pointer conversion. */ /* SPEE 900220. */
978 if (to_mode
== PQImode
)
980 if (from_mode
!= QImode
)
981 from
= convert_to_mode (QImode
, from
, unsignedp
);
983 #ifdef HAVE_truncqipqi2
984 if (HAVE_truncqipqi2
)
986 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
989 #endif /* HAVE_truncqipqi2 */
993 if (from_mode
== PQImode
)
995 if (to_mode
!= QImode
)
997 from
= convert_to_mode (QImode
, from
, unsignedp
);
1002 #ifdef HAVE_extendpqiqi2
1003 if (HAVE_extendpqiqi2
)
1005 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
1008 #endif /* HAVE_extendpqiqi2 */
1013 if (to_mode
== PSImode
)
1015 if (from_mode
!= SImode
)
1016 from
= convert_to_mode (SImode
, from
, unsignedp
);
1018 #ifdef HAVE_truncsipsi2
1019 if (HAVE_truncsipsi2
)
1021 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
1024 #endif /* HAVE_truncsipsi2 */
1028 if (from_mode
== PSImode
)
1030 if (to_mode
!= SImode
)
1032 from
= convert_to_mode (SImode
, from
, unsignedp
);
1037 #ifdef HAVE_extendpsisi2
1038 if (! unsignedp
&& HAVE_extendpsisi2
)
1040 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
1043 #endif /* HAVE_extendpsisi2 */
1044 #ifdef HAVE_zero_extendpsisi2
1045 if (unsignedp
&& HAVE_zero_extendpsisi2
)
1047 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
1050 #endif /* HAVE_zero_extendpsisi2 */
1055 if (to_mode
== PDImode
)
1057 if (from_mode
!= DImode
)
1058 from
= convert_to_mode (DImode
, from
, unsignedp
);
1060 #ifdef HAVE_truncdipdi2
1061 if (HAVE_truncdipdi2
)
1063 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1066 #endif /* HAVE_truncdipdi2 */
1070 if (from_mode
== PDImode
)
1072 if (to_mode
!= DImode
)
1074 from
= convert_to_mode (DImode
, from
, unsignedp
);
1079 #ifdef HAVE_extendpdidi2
1080 if (HAVE_extendpdidi2
)
1082 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1085 #endif /* HAVE_extendpdidi2 */
1090 /* Now follow all the conversions between integers
1091 no more than a word long. */
1093 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1094 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1095 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1096 GET_MODE_BITSIZE (from_mode
)))
1098 if (!((GET_CODE (from
) == MEM
1099 && ! MEM_VOLATILE_P (from
)
1100 && direct_load
[(int) to_mode
]
1101 && ! mode_dependent_address_p (XEXP (from
, 0)))
1102 || GET_CODE (from
) == REG
1103 || GET_CODE (from
) == SUBREG
))
1104 from
= force_reg (from_mode
, from
);
1105 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1106 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1107 from
= copy_to_reg (from
);
1108 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1112 /* Handle extension. */
1113 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1115 /* Convert directly if that works. */
1116 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1117 != CODE_FOR_nothing
)
1120 from
= force_not_mem (from
);
1122 emit_unop_insn (code
, to
, from
, equiv_code
);
1127 enum machine_mode intermediate
;
1131 /* Search for a mode to convert via. */
1132 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1133 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1134 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1135 != CODE_FOR_nothing
)
1136 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1137 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1138 GET_MODE_BITSIZE (intermediate
))))
1139 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1140 != CODE_FOR_nothing
))
1142 convert_move (to
, convert_to_mode (intermediate
, from
,
1143 unsignedp
), unsignedp
);
1147 /* No suitable intermediate mode.
1148 Generate what we need with shifts. */
1149 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1150 - GET_MODE_BITSIZE (from_mode
), 0);
1151 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1152 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1154 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1157 emit_move_insn (to
, tmp
);
1162 /* Support special truncate insns for certain modes. */
1164 if (from_mode
== DImode
&& to_mode
== SImode
)
1166 #ifdef HAVE_truncdisi2
1167 if (HAVE_truncdisi2
)
1169 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1173 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1177 if (from_mode
== DImode
&& to_mode
== HImode
)
1179 #ifdef HAVE_truncdihi2
1180 if (HAVE_truncdihi2
)
1182 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1186 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1190 if (from_mode
== DImode
&& to_mode
== QImode
)
1192 #ifdef HAVE_truncdiqi2
1193 if (HAVE_truncdiqi2
)
1195 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1199 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1203 if (from_mode
== SImode
&& to_mode
== HImode
)
1205 #ifdef HAVE_truncsihi2
1206 if (HAVE_truncsihi2
)
1208 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1212 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1216 if (from_mode
== SImode
&& to_mode
== QImode
)
1218 #ifdef HAVE_truncsiqi2
1219 if (HAVE_truncsiqi2
)
1221 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1225 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1229 if (from_mode
== HImode
&& to_mode
== QImode
)
1231 #ifdef HAVE_trunchiqi2
1232 if (HAVE_trunchiqi2
)
1234 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1238 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1242 if (from_mode
== TImode
&& to_mode
== DImode
)
1244 #ifdef HAVE_trunctidi2
1245 if (HAVE_trunctidi2
)
1247 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1251 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1255 if (from_mode
== TImode
&& to_mode
== SImode
)
1257 #ifdef HAVE_trunctisi2
1258 if (HAVE_trunctisi2
)
1260 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1264 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1268 if (from_mode
== TImode
&& to_mode
== HImode
)
1270 #ifdef HAVE_trunctihi2
1271 if (HAVE_trunctihi2
)
1273 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1277 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1281 if (from_mode
== TImode
&& to_mode
== QImode
)
1283 #ifdef HAVE_trunctiqi2
1284 if (HAVE_trunctiqi2
)
1286 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1290 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1294 /* Handle truncation of volatile memrefs, and so on;
1295 the things that couldn't be truncated directly,
1296 and for which there was no special instruction. */
1297 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1299 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1300 emit_move_insn (to
, temp
);
1304 /* Mode combination is not recognized. */
1308 /* Return an rtx for a value that would result
1309 from converting X to mode MODE.
1310 Both X and MODE may be floating, or both integer.
1311 UNSIGNEDP is nonzero if X is an unsigned value.
1312 This can be done by referring to a part of X in place
1313 or by copying to a new temporary with conversion.
1315 This function *must not* call protect_from_queue
1316 except when putting X into an insn (in which case convert_move does it). */
1319 convert_to_mode (mode
, x
, unsignedp
)
1320 enum machine_mode mode
;
1324 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1327 /* Return an rtx for a value that would result
1328 from converting X from mode OLDMODE to mode MODE.
1329 Both modes may be floating, or both integer.
1330 UNSIGNEDP is nonzero if X is an unsigned value.
1332 This can be done by referring to a part of X in place
1333 or by copying to a new temporary with conversion.
1335 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1337 This function *must not* call protect_from_queue
1338 except when putting X into an insn (in which case convert_move does it). */
1341 convert_modes (mode
, oldmode
, x
, unsignedp
)
1342 enum machine_mode mode
, oldmode
;
1348 /* If FROM is a SUBREG that indicates that we have already done at least
1349 the required extension, strip it. */
1351 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1352 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1353 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1354 x
= gen_lowpart (mode
, x
);
1356 if (GET_MODE (x
) != VOIDmode
)
1357 oldmode
= GET_MODE (x
);
1359 if (mode
== oldmode
)
1362 /* There is one case that we must handle specially: If we are converting
1363 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1364 we are to interpret the constant as unsigned, gen_lowpart will do
1365 the wrong if the constant appears negative. What we want to do is
1366 make the high-order word of the constant zero, not all ones. */
1368 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1369 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1370 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1372 HOST_WIDE_INT val
= INTVAL (x
);
1374 if (oldmode
!= VOIDmode
1375 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1377 int width
= GET_MODE_BITSIZE (oldmode
);
1379 /* We need to zero extend VAL. */
1380 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1383 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1386 /* We can do this with a gen_lowpart if both desired and current modes
1387 are integer, and this is either a constant integer, a register, or a
1388 non-volatile MEM. Except for the constant case where MODE is no
1389 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1391 if ((GET_CODE (x
) == CONST_INT
1392 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1393 || (GET_MODE_CLASS (mode
) == MODE_INT
1394 && GET_MODE_CLASS (oldmode
) == MODE_INT
1395 && (GET_CODE (x
) == CONST_DOUBLE
1396 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1397 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1398 && direct_load
[(int) mode
])
1399 || (GET_CODE (x
) == REG
1400 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1401 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1403 /* ?? If we don't know OLDMODE, we have to assume here that
1404 X does not need sign- or zero-extension. This may not be
1405 the case, but it's the best we can do. */
1406 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1407 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1409 HOST_WIDE_INT val
= INTVAL (x
);
1410 int width
= GET_MODE_BITSIZE (oldmode
);
1412 /* We must sign or zero-extend in this case. Start by
1413 zero-extending, then sign extend if we need to. */
1414 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1416 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1417 val
|= (HOST_WIDE_INT
) (-1) << width
;
1419 return gen_int_mode (val
, mode
);
1422 return gen_lowpart (mode
, x
);
1425 temp
= gen_reg_rtx (mode
);
1426 convert_move (temp
, x
, unsignedp
);
1430 /* This macro is used to determine what the largest unit size that
1431 move_by_pieces can use is. */
1433 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1434 move efficiently, as opposed to MOVE_MAX which is the maximum
1435 number of bytes we can move with a single instruction. */
1437 #ifndef MOVE_MAX_PIECES
1438 #define MOVE_MAX_PIECES MOVE_MAX
1441 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1442 store efficiently. Due to internal GCC limitations, this is
1443 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1444 for an immediate constant. */
1446 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1448 /* Generate several move instructions to copy LEN bytes from block FROM to
1449 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1450 and TO through protect_from_queue before calling.
1452 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1453 used to push FROM to the stack.
1455 ALIGN is maximum alignment we can assume. */
1458 move_by_pieces (to
, from
, len
, align
)
1460 unsigned HOST_WIDE_INT len
;
1463 struct move_by_pieces data
;
1464 rtx to_addr
, from_addr
= XEXP (from
, 0);
1465 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1466 enum machine_mode mode
= VOIDmode
, tmode
;
1467 enum insn_code icode
;
1470 data
.from_addr
= from_addr
;
1473 to_addr
= XEXP (to
, 0);
1476 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1477 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1479 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1486 #ifdef STACK_GROWS_DOWNWARD
1492 data
.to_addr
= to_addr
;
1495 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1496 || GET_CODE (from_addr
) == POST_INC
1497 || GET_CODE (from_addr
) == POST_DEC
);
1499 data
.explicit_inc_from
= 0;
1500 data
.explicit_inc_to
= 0;
1501 if (data
.reverse
) data
.offset
= len
;
1504 /* If copying requires more than two move insns,
1505 copy addresses to registers (to make displacements shorter)
1506 and use post-increment if available. */
1507 if (!(data
.autinc_from
&& data
.autinc_to
)
1508 && move_by_pieces_ninsns (len
, align
) > 2)
1510 /* Find the mode of the largest move... */
1511 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1512 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1513 if (GET_MODE_SIZE (tmode
) < max_size
)
1516 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1518 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1519 data
.autinc_from
= 1;
1520 data
.explicit_inc_from
= -1;
1522 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1524 data
.from_addr
= copy_addr_to_reg (from_addr
);
1525 data
.autinc_from
= 1;
1526 data
.explicit_inc_from
= 1;
1528 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1529 data
.from_addr
= copy_addr_to_reg (from_addr
);
1530 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1532 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1534 data
.explicit_inc_to
= -1;
1536 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1538 data
.to_addr
= copy_addr_to_reg (to_addr
);
1540 data
.explicit_inc_to
= 1;
1542 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1543 data
.to_addr
= copy_addr_to_reg (to_addr
);
1546 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1547 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1548 align
= MOVE_MAX
* BITS_PER_UNIT
;
1550 /* First move what we can in the largest integer mode, then go to
1551 successively smaller modes. */
1553 while (max_size
> 1)
1555 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1556 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1557 if (GET_MODE_SIZE (tmode
) < max_size
)
1560 if (mode
== VOIDmode
)
1563 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1564 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1565 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1567 max_size
= GET_MODE_SIZE (mode
);
1570 /* The code above should have handled everything. */
1575 /* Return number of insns required to move L bytes by pieces.
1576 ALIGN (in bits) is maximum alignment we can assume. */
1578 static unsigned HOST_WIDE_INT
1579 move_by_pieces_ninsns (l
, align
)
1580 unsigned HOST_WIDE_INT l
;
1583 unsigned HOST_WIDE_INT n_insns
= 0;
1584 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1586 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1587 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1588 align
= MOVE_MAX
* BITS_PER_UNIT
;
1590 while (max_size
> 1)
1592 enum machine_mode mode
= VOIDmode
, tmode
;
1593 enum insn_code icode
;
1595 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1596 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1597 if (GET_MODE_SIZE (tmode
) < max_size
)
1600 if (mode
== VOIDmode
)
1603 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1604 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1605 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1607 max_size
= GET_MODE_SIZE (mode
);
1615 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1616 with move instructions for mode MODE. GENFUN is the gen_... function
1617 to make a move insn for that mode. DATA has all the other info. */
1620 move_by_pieces_1 (genfun
, mode
, data
)
1621 rtx (*genfun
) PARAMS ((rtx
, ...));
1622 enum machine_mode mode
;
1623 struct move_by_pieces
*data
;
1625 unsigned int size
= GET_MODE_SIZE (mode
);
1626 rtx to1
= NULL_RTX
, from1
;
1628 while (data
->len
>= size
)
1631 data
->offset
-= size
;
1635 if (data
->autinc_to
)
1636 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1639 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1642 if (data
->autinc_from
)
1643 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1646 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1648 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1649 emit_insn (gen_add2_insn (data
->to_addr
,
1650 GEN_INT (-(HOST_WIDE_INT
)size
)));
1651 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1652 emit_insn (gen_add2_insn (data
->from_addr
,
1653 GEN_INT (-(HOST_WIDE_INT
)size
)));
1656 emit_insn ((*genfun
) (to1
, from1
));
1659 #ifdef PUSH_ROUNDING
1660 emit_single_push_insn (mode
, from1
, NULL
);
1666 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1667 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1668 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1669 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1671 if (! data
->reverse
)
1672 data
->offset
+= size
;
1678 /* Emit code to move a block Y to a block X. This may be done with
1679 string-move instructions, with multiple scalar move instructions,
1680 or with a library call.
1682 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1683 SIZE is an rtx that says how long they are.
1684 ALIGN is the maximum alignment we can assume they have.
1685 METHOD describes what kind of copy this is, and what mechanisms may be used.
1687 Return the address of the new block, if memcpy is called and returns it,
1691 emit_block_move (x
, y
, size
, method
)
1693 enum block_op_methods method
;
1701 case BLOCK_OP_NORMAL
:
1702 may_use_call
= true;
1705 case BLOCK_OP_CALL_PARM
:
1706 may_use_call
= block_move_libcall_safe_for_call_parm ();
1708 /* Make inhibit_defer_pop nonzero around the library call
1709 to force it to pop the arguments right away. */
1713 case BLOCK_OP_NO_LIBCALL
:
1714 may_use_call
= false;
1721 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1723 if (GET_MODE (x
) != BLKmode
)
1725 if (GET_MODE (y
) != BLKmode
)
1728 x
= protect_from_queue (x
, 1);
1729 y
= protect_from_queue (y
, 0);
1730 size
= protect_from_queue (size
, 0);
1732 if (GET_CODE (x
) != MEM
)
1734 if (GET_CODE (y
) != MEM
)
1739 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1740 can be incorrect is coming from __builtin_memcpy. */
1741 if (GET_CODE (size
) == CONST_INT
)
1743 x
= shallow_copy_rtx (x
);
1744 y
= shallow_copy_rtx (y
);
1745 set_mem_size (x
, size
);
1746 set_mem_size (y
, size
);
1749 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1750 move_by_pieces (x
, y
, INTVAL (size
), align
);
1751 else if (emit_block_move_via_movstr (x
, y
, size
, align
))
1753 else if (may_use_call
)
1754 retval
= emit_block_move_via_libcall (x
, y
, size
);
1756 emit_block_move_via_loop (x
, y
, size
, align
);
1758 if (method
== BLOCK_OP_CALL_PARM
)
1764 /* A subroutine of emit_block_move. Returns true if calling the
1765 block move libcall will not clobber any parameters which may have
1766 already been placed on the stack. */
1769 block_move_libcall_safe_for_call_parm ()
1775 /* Check to see whether memcpy takes all register arguments. */
1777 takes_regs_uninit
, takes_regs_no
, takes_regs_yes
1778 } takes_regs
= takes_regs_uninit
;
1782 case takes_regs_uninit
:
1784 CUMULATIVE_ARGS args_so_far
;
1787 fn
= emit_block_move_libcall_fn (false);
1788 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0);
1790 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1791 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1793 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1794 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1795 if (!tmp
|| !REG_P (tmp
))
1796 goto fail_takes_regs
;
1797 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1798 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1800 goto fail_takes_regs
;
1802 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1805 takes_regs
= takes_regs_yes
;
1808 case takes_regs_yes
:
1812 takes_regs
= takes_regs_no
;
1823 /* A subroutine of emit_block_move. Expand a movstr pattern;
1824 return true if successful. */
1827 emit_block_move_via_movstr (x
, y
, size
, align
)
1831 /* Try the most limited insn first, because there's no point
1832 including more than one in the machine description unless
1833 the more limited one has some advantage. */
1835 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1836 enum machine_mode mode
;
1838 /* Since this is a move insn, we don't care about volatility. */
1841 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1842 mode
= GET_MODE_WIDER_MODE (mode
))
1844 enum insn_code code
= movstr_optab
[(int) mode
];
1845 insn_operand_predicate_fn pred
;
1847 if (code
!= CODE_FOR_nothing
1848 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1849 here because if SIZE is less than the mode mask, as it is
1850 returned by the macro, it will definitely be less than the
1851 actual mode mask. */
1852 && ((GET_CODE (size
) == CONST_INT
1853 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1854 <= (GET_MODE_MASK (mode
) >> 1)))
1855 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1856 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1857 || (*pred
) (x
, BLKmode
))
1858 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1859 || (*pred
) (y
, BLKmode
))
1860 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1861 || (*pred
) (opalign
, VOIDmode
)))
1864 rtx last
= get_last_insn ();
1867 op2
= convert_to_mode (mode
, size
, 1);
1868 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1869 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1870 op2
= copy_to_mode_reg (mode
, op2
);
1872 /* ??? When called via emit_block_move_for_call, it'd be
1873 nice if there were some way to inform the backend, so
1874 that it doesn't fail the expansion because it thinks
1875 emitting the libcall would be more efficient. */
1877 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1885 delete_insns_since (last
);
1893 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1894 Return the return value from memcpy, 0 otherwise. */
1897 emit_block_move_via_libcall (dst
, src
, size
)
1900 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1901 enum machine_mode size_mode
;
1904 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1906 It is unsafe to save the value generated by protect_from_queue
1907 and reuse it later. Consider what happens if emit_queue is
1908 called before the return value from protect_from_queue is used.
1910 Expansion of the CALL_EXPR below will call emit_queue before
1911 we are finished emitting RTL for argument setup. So if we are
1912 not careful we could get the wrong value for an argument.
1914 To avoid this problem we go ahead and emit code to copy X, Y &
1915 SIZE into new pseudos. We can then place those new pseudos
1916 into an RTL_EXPR and use them later, even after a call to
1919 Note this is not strictly needed for library calls since they
1920 do not call emit_queue before loading their arguments. However,
1921 we may need to have library calls call emit_queue in the future
1922 since failing to do so could cause problems for targets which
1923 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1925 dst
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1926 src
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1928 if (TARGET_MEM_FUNCTIONS
)
1929 size_mode
= TYPE_MODE (sizetype
);
1931 size_mode
= TYPE_MODE (unsigned_type_node
);
1932 size
= convert_to_mode (size_mode
, size
, 1);
1933 size
= copy_to_mode_reg (size_mode
, size
);
1935 /* It is incorrect to use the libcall calling conventions to call
1936 memcpy in this context. This could be a user call to memcpy and
1937 the user may wish to examine the return value from memcpy. For
1938 targets where libcalls and normal calls have different conventions
1939 for returning pointers, we could end up generating incorrect code.
1941 For convenience, we generate the call to bcopy this way as well. */
1943 dst_tree
= make_tree (ptr_type_node
, dst
);
1944 src_tree
= make_tree (ptr_type_node
, src
);
1945 if (TARGET_MEM_FUNCTIONS
)
1946 size_tree
= make_tree (sizetype
, size
);
1948 size_tree
= make_tree (unsigned_type_node
, size
);
1950 fn
= emit_block_move_libcall_fn (true);
1951 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1952 if (TARGET_MEM_FUNCTIONS
)
1954 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1955 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1959 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1960 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1963 /* Now we have to build up the CALL_EXPR itself. */
1964 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1965 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1966 call_expr
, arg_list
, NULL_TREE
);
1967 TREE_SIDE_EFFECTS (call_expr
) = 1;
1969 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1971 /* If we are initializing a readonly value, show the above call
1972 clobbered it. Otherwise, a load from it may erroneously be
1973 hoisted from a loop. */
1974 if (RTX_UNCHANGING_P (dst
))
1975 emit_insn (gen_rtx_CLOBBER (VOIDmode
, dst
));
1977 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
1980 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1981 for the function we use for block copies. The first time FOR_CALL
1982 is true, we call assemble_external. */
1984 static GTY(()) tree block_move_fn
;
1987 emit_block_move_libcall_fn (for_call
)
1990 static bool emitted_extern
;
1991 tree fn
= block_move_fn
, args
;
1995 if (TARGET_MEM_FUNCTIONS
)
1997 fn
= get_identifier ("memcpy");
1998 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1999 const_ptr_type_node
, sizetype
,
2004 fn
= get_identifier ("bcopy");
2005 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
2006 ptr_type_node
, unsigned_type_node
,
2010 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2011 DECL_EXTERNAL (fn
) = 1;
2012 TREE_PUBLIC (fn
) = 1;
2013 DECL_ARTIFICIAL (fn
) = 1;
2014 TREE_NOTHROW (fn
) = 1;
2019 if (for_call
&& !emitted_extern
)
2021 emitted_extern
= true;
2022 make_decl_rtl (fn
, NULL
);
2023 assemble_external (fn
);
2029 /* A subroutine of emit_block_move. Copy the data via an explicit
2030 loop. This is used only when libcalls are forbidden. */
2031 /* ??? It'd be nice to copy in hunks larger than QImode. */
2034 emit_block_move_via_loop (x
, y
, size
, align
)
2036 unsigned int align ATTRIBUTE_UNUSED
;
2038 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
2039 enum machine_mode iter_mode
;
2041 iter_mode
= GET_MODE (size
);
2042 if (iter_mode
== VOIDmode
)
2043 iter_mode
= word_mode
;
2045 top_label
= gen_label_rtx ();
2046 cmp_label
= gen_label_rtx ();
2047 iter
= gen_reg_rtx (iter_mode
);
2049 emit_move_insn (iter
, const0_rtx
);
2051 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
2052 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
2053 do_pending_stack_adjust ();
2055 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
2057 emit_jump (cmp_label
);
2058 emit_label (top_label
);
2060 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
2061 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
2062 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
2063 x
= change_address (x
, QImode
, x_addr
);
2064 y
= change_address (y
, QImode
, y_addr
);
2066 emit_move_insn (x
, y
);
2068 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
2069 true, OPTAB_LIB_WIDEN
);
2071 emit_move_insn (iter
, tmp
);
2073 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
2074 emit_label (cmp_label
);
2076 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
2079 emit_note (NULL
, NOTE_INSN_LOOP_END
);
2082 /* Copy all or part of a value X into registers starting at REGNO.
2083 The number of registers to be filled is NREGS. */
2086 move_block_to_reg (regno
, x
, nregs
, mode
)
2090 enum machine_mode mode
;
2093 #ifdef HAVE_load_multiple
2101 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
2102 x
= validize_mem (force_const_mem (mode
, x
));
2104 /* See if the machine can do this with a load multiple insn. */
2105 #ifdef HAVE_load_multiple
2106 if (HAVE_load_multiple
)
2108 last
= get_last_insn ();
2109 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
2117 delete_insns_since (last
);
2121 for (i
= 0; i
< nregs
; i
++)
2122 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
2123 operand_subword_force (x
, i
, mode
));
2126 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2127 The number of registers to be filled is NREGS. SIZE indicates the number
2128 of bytes in the object X. */
2131 move_block_from_reg (regno
, x
, nregs
, size
)
2138 #ifdef HAVE_store_multiple
2142 enum machine_mode mode
;
2147 /* If SIZE is that of a mode no bigger than a word, just use that
2148 mode's store operation. */
2149 if (size
<= UNITS_PER_WORD
2150 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
2151 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
)
2153 emit_move_insn (adjust_address (x
, mode
, 0), gen_rtx_REG (mode
, regno
));
2157 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2158 to the left before storing to memory. Note that the previous test
2159 doesn't handle all cases (e.g. SIZE == 3). */
2160 if (size
< UNITS_PER_WORD
2162 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
)
2164 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
2170 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
2171 gen_rtx_REG (word_mode
, regno
),
2172 build_int_2 ((UNITS_PER_WORD
- size
)
2173 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
2174 emit_move_insn (tem
, shift
);
2178 /* See if the machine can do this with a store multiple insn. */
2179 #ifdef HAVE_store_multiple
2180 if (HAVE_store_multiple
)
2182 last
= get_last_insn ();
2183 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
2191 delete_insns_since (last
);
2195 for (i
= 0; i
< nregs
; i
++)
2197 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
2202 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, ssize);
            }
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
          else
            abort ();
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
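
/* Illustrative example (not part of the original source): a 16-byte value
   returned in two registers might be described by a PARALLEL such as

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   where the second operand of each EXPR_LIST is the byte offset of that
   piece within the block; the register numbers are hypothetical.
   emit_group_load then copies bytes 0-7 of SRC into (reg:DI 3) and bytes
   8-15 into (reg:DI 4).  A null first operand in element 0 marks a
   parameter passed both on the stack and in registers, which is why the
   loops above may start at index 1.  */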
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
        srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
        srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
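
/* Worked example (illustrative, not from the original source): on a
   64-bit BYTES_BIG_ENDIAN target with UNITS_PER_WORD == 8, a 6-byte
   structure returned in a register occupies the low-order 48 bits, so
   big_endian_correction = 64 - 6 * 8 = 16.  The copy loop above then
   starts extracting at bit 16 of the source word while storing at bit 0
   of the destination, skipping the two empty high-order bytes.  */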
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
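
#if 0
/* Illustrative sketch (not part of the original source): a minimal
   CONSTFUN of the kind a caller might pass to can_store_by_pieces and
   store_by_pieces.  It returns the piece of a constant string starting
   at OFFSET in mode MODE.  The helper c_readstr lives in builtins.c and
   is assumed here only for the sake of the example.  */

static rtx
example_constfun (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  const char *str = (const char *) data;

  /* Build a constant holding the bytes of STR starting at OFFSET, in
     the byte order MODE requires.  */
  return c_readstr (str + offset, mode);
}

/* A caller would then typically do:

     if (can_store_by_pieces (len, example_constfun, (PTR) str, align))
       store_by_pieces (dest, len, example_constfun, (PTR) str, align);  */
#endif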
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
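
/* Illustrative note (not from the original source): for a 16-byte,
   word-aligned BLKmode object with a constant size, the tests above
   normally resolve to clear_by_pieces, which expands into a handful of
   zero stores; a large or variable SIZE instead tries the target's
   clrstr pattern and, failing that, falls back to a call to memset
   (or bzero when TARGET_MEM_FUNCTIONS is 0).  */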
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (object, size, align)
     rtx object, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (object, size)
     rtx object, size;
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
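
/* Illustrative example (not part of the original source): when
   TARGET_MEM_FUNCTIONS is 1, the CALL_EXPR built above corresponds to
   the C call

       memset (object, 0, size);

   and its return value is what clear_storage_via_libcall returns; with
   TARGET_MEM_FUNCTIONS 0 the equivalent call is bzero (object, size),
   which returns nothing, hence the NULL_RTX result.  */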
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

static tree
clear_storage_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;
  tree fn = block_clear_fn, args;

  if (!fn)
    {
      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memset");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           integer_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bzero");
          args = build_function_type_list (void_type_node, ptr_type_node,
                                           unsigned_type_node, NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);
    }

  return fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y_cst = y;
          y = force_const_mem (mode, y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();
3149 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
3151 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
3153 /* Expand complex moves by moving real part and imag part, if possible. */
3154 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
3155 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
3157 (class == MODE_COMPLEX_INT
3158 ? MODE_INT
: MODE_FLOAT
),
3160 && (mov_optab
->handlers
[(int) submode
].insn_code
3161 != CODE_FOR_nothing
))
3163 /* Don't split destination if it is a stack push. */
3164 int stack
= push_operand (x
, GET_MODE (x
));
3166 #ifdef PUSH_ROUNDING
  /* In case we output to the stack, but the size is smaller than the machine
     can push exactly, we need to use move instructions.  */
3170 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
3171 != GET_MODE_SIZE (submode
)))
3174 HOST_WIDE_INT offset1
, offset2
;
3176 /* Do not use anti_adjust_stack, since we don't want to update
3177 stack_pointer_delta. */
3178 temp
= expand_binop (Pmode
,
3179 #ifdef STACK_GROWS_DOWNWARD
3187 (GET_MODE_SIZE (GET_MODE (x
)))),
3188 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3190 if (temp
!= stack_pointer_rtx
)
3191 emit_move_insn (stack_pointer_rtx
, temp
);
3193 #ifdef STACK_GROWS_DOWNWARD
3195 offset2
= GET_MODE_SIZE (submode
);
3197 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
3198 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
3199 + GET_MODE_SIZE (submode
));
3202 emit_move_insn (change_address (x
, submode
,
3203 gen_rtx_PLUS (Pmode
,
3205 GEN_INT (offset1
))),
3206 gen_realpart (submode
, y
));
3207 emit_move_insn (change_address (x
, submode
,
3208 gen_rtx_PLUS (Pmode
,
3210 GEN_INT (offset2
))),
3211 gen_imagpart (submode
, y
));
3215 /* If this is a stack, push the highpart first, so it
3216 will be in the argument order.
3218 In that case, change_address is used only to convert
3219 the mode, not to change the address. */
3222 /* Note that the real part always precedes the imag part in memory
3223 regardless of machine's endianness. */
3224 #ifdef STACK_GROWS_DOWNWARD
3225 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3226 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3227 gen_imagpart (submode
, y
)));
3228 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3229 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3230 gen_realpart (submode
, y
)));
3232 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3233 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3234 gen_realpart (submode
, y
)));
3235 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3236 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3237 gen_imagpart (submode
, y
)));
3242 rtx realpart_x
, realpart_y
;
3243 rtx imagpart_x
, imagpart_y
;
3245 /* If this is a complex value with each part being smaller than a
3246 word, the usual calling sequence will likely pack the pieces into
3247 a single register. Unfortunately, SUBREG of hard registers only
3248 deals in terms of words, so we have a problem converting input
3249 arguments to the CONCAT of two registers that is used elsewhere
3250 for complex values. If this is before reload, we can copy it into
3251 memory and reload. FIXME, we should see about using extract and
3252 insert on integer registers, but complex short and complex char
3253 variables should be rarely used. */
3254 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
3255 && (reload_in_progress
| reload_completed
) == 0)
3258 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
3260 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
3262 if (packed_dest_p
|| packed_src_p
)
3264 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
3265 ? MODE_FLOAT
: MODE_INT
);
3267 enum machine_mode reg_mode
3268 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
3270 if (reg_mode
!= BLKmode
)
3272 rtx mem
= assign_stack_temp (reg_mode
,
3273 GET_MODE_SIZE (mode
), 0);
3274 rtx cmem
= adjust_address (mem
, mode
, 0);
3277 = N_("function using short complex types cannot be inline");
3281 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
3283 emit_move_insn_1 (cmem
, y
);
3284 return emit_move_insn_1 (sreg
, mem
);
3288 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
3290 emit_move_insn_1 (mem
, sreg
);
3291 return emit_move_insn_1 (x
, cmem
);
3297 realpart_x
= gen_realpart (submode
, x
);
3298 realpart_y
= gen_realpart (submode
, y
);
3299 imagpart_x
= gen_imagpart (submode
, x
);
3300 imagpart_y
= gen_imagpart (submode
, y
);
3302 /* Show the output dies here. This is necessary for SUBREGs
3303 of pseudos since we cannot track their lifetimes correctly;
3304 hard regs shouldn't appear here except as return values.
3305 We never want to emit such a clobber after reload. */
3307 && ! (reload_in_progress
|| reload_completed
)
3308 && (GET_CODE (realpart_x
) == SUBREG
3309 || GET_CODE (imagpart_x
) == SUBREG
))
3310 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3312 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3313 (realpart_x
, realpart_y
));
3314 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3315 (imagpart_x
, imagpart_y
));
3318 return get_last_insn ();
3321 /* This will handle any multi-word or full-word mode that lacks a move_insn
3322 pattern. However, you will get better code if you define such patterns,
3323 even if they must turn into multiple assembler instructions. */
3324 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3331 #ifdef PUSH_ROUNDING
3333 /* If X is a push on the stack, do the push now and replace
3334 X with a reference to the stack pointer. */
3335 if (push_operand (x
, GET_MODE (x
)))
3340 /* Do not use anti_adjust_stack, since we don't want to update
3341 stack_pointer_delta. */
3342 temp
= expand_binop (Pmode
,
3343 #ifdef STACK_GROWS_DOWNWARD
3351 (GET_MODE_SIZE (GET_MODE (x
)))),
3352 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3354 if (temp
!= stack_pointer_rtx
)
3355 emit_move_insn (stack_pointer_rtx
, temp
);
3357 code
= GET_CODE (XEXP (x
, 0));
3359 /* Just hope that small offsets off SP are OK. */
3360 if (code
== POST_INC
)
3361 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3362 GEN_INT (-((HOST_WIDE_INT
)
3363 GET_MODE_SIZE (GET_MODE (x
)))));
3364 else if (code
== POST_DEC
)
3365 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3366 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3368 temp
= stack_pointer_rtx
;
3370 x
= change_address (x
, VOIDmode
, temp
);
3374 /* If we are in reload, see if either operand is a MEM whose address
3375 is scheduled for replacement. */
3376 if (reload_in_progress
&& GET_CODE (x
) == MEM
3377 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3378 x
= replace_equiv_address_nv (x
, inner
);
3379 if (reload_in_progress
&& GET_CODE (y
) == MEM
3380 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3381 y
= replace_equiv_address_nv (y
, inner
);
3387 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3390 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3391 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3393 /* If we can't get a part of Y, put Y into memory if it is a
3394 constant. Otherwise, force it into a register. If we still
3395 can't get a part of Y, abort. */
3396 if (ypart
== 0 && CONSTANT_P (y
))
3398 y
= force_const_mem (mode
, y
);
3399 ypart
= operand_subword (y
, i
, 1, mode
);
3401 else if (ypart
== 0)
3402 ypart
= operand_subword_force (y
, i
, mode
);
3404 if (xpart
== 0 || ypart
== 0)
3407 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3409 last_insn
= emit_move_insn (xpart
, ypart
);
3415 /* Show the output dies here. This is necessary for SUBREGs
3416 of pseudos since we cannot track their lifetimes correctly;
3417 hard regs shouldn't appear here except as return values.
3418 We never want to emit such a clobber after reload. */
3420 && ! (reload_in_progress
|| reload_completed
)
3421 && need_clobber
!= 0)
3422 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (x, y)
     rtx x, y;
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
        REG_NOTES (last_insn)
          = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     enum machine_mode mode;
     rtx x;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
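
/* Illustrative example (not part of the original source): on a target
   without a "push<mode>" pattern, where STACK_PUSH_CODE is PRE_DEC and
   SImode needs no rounding, the code above produces a store of the form

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI x))

   while a mode whose size must be rounded up goes through the explicit
   PRE_MODIFY address instead.  */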
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space,
                alignment_pad)
     rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif
3664 /* Decide where to pad the argument: `downward' for below,
3665 `upward' for above, or `none' for don't pad it.
3666 Default is below for small data on big-endian machines; else above. */
3667 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3669 /* Invert direction if stack is post-decrement.
3671 if (STACK_PUSH_CODE
== POST_DEC
)
3672 if (where_pad
!= none
)
3673 where_pad
= (where_pad
== downward
? upward
: downward
);
3675 xinner
= x
= protect_from_queue (x
, 0);
3677 if (mode
== BLKmode
)
3679 /* Copy a block into the stack, entirely or partially. */
3682 int used
= partial
* UNITS_PER_WORD
;
3683 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3691 /* USED is now the # of bytes we need not copy to the stack
3692 because registers will take care of them. */
3695 xinner
= adjust_address (xinner
, BLKmode
, used
);
3697 /* If the partial register-part of the arg counts in its stack size,
3698 skip the part of stack space corresponding to the registers.
3699 Otherwise, start copying to the beginning of the stack space,
3700 by setting SKIP to 0. */
3701 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3703 #ifdef PUSH_ROUNDING
3704 /* Do it with several push insns if that doesn't take lots of insns
3705 and if there is no difficulty with push insns that skip bytes
3706 on the stack for alignment purposes. */
3709 && GET_CODE (size
) == CONST_INT
3711 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3712 /* Here we avoid the case of a structure whose weak alignment
3713 forces many pushes of a small amount of data,
3714 and such small pushes do rounding that causes trouble. */
3715 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3716 || align
>= BIGGEST_ALIGNMENT
3717 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3718 == (align
/ BITS_PER_UNIT
)))
3719 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3721 /* Push padding now if padding above and stack grows down,
3722 or if padding below and stack grows up.
3723 But if space already allocated, this has already been done. */
3724 if (extra
&& args_addr
== 0
3725 && where_pad
!= none
&& where_pad
!= stack_direction
)
3726 anti_adjust_stack (GEN_INT (extra
));
3728 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
);
3731 #endif /* PUSH_ROUNDING */
3735 /* Otherwise make space on the stack and copy the data
3736 to the address of that space. */
3738 /* Deduct words put into registers from the size we must copy. */
3741 if (GET_CODE (size
) == CONST_INT
)
3742 size
= GEN_INT (INTVAL (size
) - used
);
3744 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3745 GEN_INT (used
), NULL_RTX
, 0,
3749 /* Get the address of the stack space.
3750 In this case, we do not deal with EXTRA separately.
3751 A single stack adjust will do. */
3754 temp
= push_block (size
, extra
, where_pad
== downward
);
3757 else if (GET_CODE (args_so_far
) == CONST_INT
)
3758 temp
= memory_address (BLKmode
,
3759 plus_constant (args_addr
,
3760 skip
+ INTVAL (args_so_far
)));
3762 temp
= memory_address (BLKmode
,
3763 plus_constant (gen_rtx_PLUS (Pmode
,
3768 if (!ACCUMULATE_OUTGOING_ARGS
)
3770 /* If the source is referenced relative to the stack pointer,
3771 copy it to another register to stabilize it. We do not need
3772 to do this if we know that we won't be changing sp. */
3774 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3775 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3776 temp
= copy_to_reg (temp
);
3779 target
= gen_rtx_MEM (BLKmode
, temp
);
3783 set_mem_attributes (target
, type
, 1);
3784 /* Function incoming arguments may overlap with sibling call
3785 outgoing arguments and we cannot allow reordering of reads
3786 from function arguments with stores to outgoing arguments
3787 of sibling calls. */
3788 set_mem_alias_set (target
, 0);
3791 /* ALIGN may well be better aligned than TYPE, e.g. due to
3792 PARM_BOUNDARY. Assume the caller isn't lying. */
3793 set_mem_align (target
, align
);
3795 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3798 else if (partial
> 0)
3800 /* Scalar partly in registers. */
3802 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3805 /* # words of start of argument
3806 that we must make space for but need not store. */
3807 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3808 int args_offset
= INTVAL (args_so_far
);
3811 /* Push padding now if padding above and stack grows down,
3812 or if padding below and stack grows up.
3813 But if space already allocated, this has already been done. */
3814 if (extra
&& args_addr
== 0
3815 && where_pad
!= none
&& where_pad
!= stack_direction
)
3816 anti_adjust_stack (GEN_INT (extra
));
3818 /* If we make space by pushing it, we might as well push
3819 the real data. Otherwise, we can leave OFFSET nonzero
3820 and leave the space uninitialized. */
3824 /* Now NOT_STACK gets the number of words that we don't need to
3825 allocate on the stack. */
3826 not_stack
= partial
- offset
;
3828 /* If the partial register-part of the arg counts in its stack size,
3829 skip the part of stack space corresponding to the registers.
3830 Otherwise, start copying to the beginning of the stack space,
3831 by setting SKIP to 0. */
3832 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3834 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3835 x
= validize_mem (force_const_mem (mode
, x
));
3837 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3838 SUBREGs of such registers are not allowed. */
3839 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3840 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3841 x
= copy_to_reg (x
);
3843 /* Loop over all the words allocated on the stack for this arg. */
3844 /* We can do it by words, because any scalar bigger than a word
3845 has a size a multiple of a word. */
3846 #ifndef PUSH_ARGS_REVERSED
3847 for (i
= not_stack
; i
< size
; i
++)
3849 for (i
= size
- 1; i
>= not_stack
; i
--)
3851 if (i
>= not_stack
+ offset
)
3852 emit_push_insn (operand_subword_force (x
, i
, mode
),
3853 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3855 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3857 reg_parm_stack_space
, alignment_pad
);
3862 rtx target
= NULL_RTX
;
3865 /* Push padding now if padding above and stack grows down,
3866 or if padding below and stack grows up.
3867 But if space already allocated, this has already been done. */
3868 if (extra
&& args_addr
== 0
3869 && where_pad
!= none
&& where_pad
!= stack_direction
)
3870 anti_adjust_stack (GEN_INT (extra
));
3872 #ifdef PUSH_ROUNDING
3873 if (args_addr
== 0 && PUSH_ARGS
)
3874 emit_single_push_insn (mode
, x
, type
);
3878 if (GET_CODE (args_so_far
) == CONST_INT
)
3880 = memory_address (mode
,
3881 plus_constant (args_addr
,
3882 INTVAL (args_so_far
)));
3884 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3887 dest
= gen_rtx_MEM (mode
, addr
);
3890 set_mem_attributes (dest
, type
, 1);
3891 /* Function incoming arguments may overlap with sibling call
3892 outgoing arguments and we cannot allow reordering of reads
3893 from function arguments with stores to outgoing arguments
3894 of sibling calls. */
3895 set_mem_alias_set (dest
, 0);
3898 emit_move_insn (dest
, x
);
3902 /* If part should go in registers, copy that part
3903 into the appropriate registers. Do this now, at the end,
3904 since mem-to-mem copies above may do function calls. */
3905 if (partial
> 0 && reg
!= 0)
3907 /* Handle calls that pass values in multiple non-contiguous locations.
3908 The Irix 6 ABI has examples of this. */
3909 if (GET_CODE (reg
) == PARALLEL
)
3910 emit_group_load (reg
, x
, -1); /* ??? size? */
3912 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3915 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3916 anti_adjust_stack (GEN_INT (extra
));
3918 if (alignment_pad
&& args_addr
== 0)
3919 anti_adjust_stack (alignment_pad
);
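
/* Illustrative summary (not from the original source): pushing a
   BLKmode argument of, say, 24 constant bytes with PUSH_ROUNDING
   defined and no partial-register portion takes the MOVE_BY_PIECES_P
   branch above and expands into a short sequence of push insns; the
   same argument on a target without push insns is instead copied with
   emit_block_move into the preallocated argument block at
   ARGS_ADDR + ARGS_SO_FAR.  */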
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  rtx to_rtx = 0;
  rtx result;
3962 /* Don't crash if the lhs of the assignment was erroneous. */
3964 if (TREE_CODE (to
) == ERROR_MARK
)
3966 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3967 return want_value
? result
: NULL_RTX
;
3970 /* Assignment of a structure component needs special treatment
3971 if the structure component's rtx is not simply a MEM.
3972 Assignment of an array element at a constant index, and assignment of
3973 an array element in an unaligned packed structure field, has the same
3976 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3977 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
)
3979 enum machine_mode mode1
;
3980 HOST_WIDE_INT bitsize
, bitpos
;
3988 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3989 &unsignedp
, &volatilep
);
3991 /* If we are going to use store_bit_field and extract_bit_field,
3992 make sure to_rtx will be safe for multiple use. */
3994 if (mode1
== VOIDmode
&& want_value
)
3995 tem
= stabilize_reference (tem
);
3997 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
4001 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4003 if (GET_CODE (to_rtx
) != MEM
)
4006 #ifdef POINTERS_EXTEND_UNSIGNED
4007 if (GET_MODE (offset_rtx
) != Pmode
)
4008 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4010 if (GET_MODE (offset_rtx
) != ptr_mode
)
4011 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4014 /* A constant address in TO_RTX can have VOIDmode, we must not try
4015 to call force_reg for that case. Avoid that case. */
4016 if (GET_CODE (to_rtx
) == MEM
4017 && GET_MODE (to_rtx
) == BLKmode
4018 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
4020 && (bitpos
% bitsize
) == 0
4021 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4022 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
4024 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4028 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4029 highest_pow2_factor_for_type (TREE_TYPE (to
),
4033 if (GET_CODE (to_rtx
) == MEM
)
4035 /* If the field is at offset zero, we could have been given the
4036 DECL_RTX of the parent struct. Don't munge it. */
4037 to_rtx
= shallow_copy_rtx (to_rtx
);
4039 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
4042 /* Deal with volatile and readonly fields. The former is only done
4043 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4044 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
4046 if (to_rtx
== orig_to_rtx
)
4047 to_rtx
= copy_rtx (to_rtx
);
4048 MEM_VOLATILE_P (to_rtx
) = 1;
4051 if (TREE_CODE (to
) == COMPONENT_REF
4052 && TREE_READONLY (TREE_OPERAND (to
, 1)))
4054 if (to_rtx
== orig_to_rtx
)
4055 to_rtx
= copy_rtx (to_rtx
);
4056 RTX_UNCHANGING_P (to_rtx
) = 1;
4059 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
4061 if (to_rtx
== orig_to_rtx
)
4062 to_rtx
= copy_rtx (to_rtx
);
4063 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4066 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
4068 /* Spurious cast for HPUX compiler. */
4069 ? ((enum machine_mode
)
4070 TYPE_MODE (TREE_TYPE (to
)))
4072 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
4074 preserve_temp_slots (result
);
4078 /* If the value is meaningful, convert RESULT to the proper mode.
4079 Otherwise, return nothing. */
4080 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
4081 TYPE_MODE (TREE_TYPE (from
)),
4083 TREE_UNSIGNED (TREE_TYPE (to
)))
4087 /* If the rhs is a function call and its value is not an aggregate,
4088 call the function before we start to compute the lhs.
4089 This is needed for correct code for cases such as
4090 val = setjmp (buf) on machines where reference to val
4091 requires loading up part of an address in a separate insn.
4093 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4094 since it might be a promoted variable where the zero- or sign- extension
4095 needs to be done. Handling this in the normal way is safe because no
4096 computation is done before the call. */
4097 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
4098 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
4099 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
4100 && GET_CODE (DECL_RTL (to
)) == REG
))
4105 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4107 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4109 /* Handle calls that return values in multiple non-contiguous locations.
4110 The Irix 6 ABI has examples of this. */
4111 if (GET_CODE (to_rtx
) == PARALLEL
)
4112 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)));
4113 else if (GET_MODE (to_rtx
) == BLKmode
)
4114 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
4117 #ifdef POINTERS_EXTEND_UNSIGNED
4118 if (POINTER_TYPE_P (TREE_TYPE (to
))
4119 && GET_MODE (to_rtx
) != GET_MODE (value
))
4120 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
4122 emit_move_insn (to_rtx
, value
);
4124 preserve_temp_slots (to_rtx
);
4127 return want_value
? to_rtx
: NULL_RTX
;
4130 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4131 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4134 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4136 /* Don't move directly into a return register. */
4137 if (TREE_CODE (to
) == RESULT_DECL
4138 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
4143 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
4145 if (GET_CODE (to_rtx
) == PARALLEL
)
4146 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)));
4148 emit_move_insn (to_rtx
, temp
);
4150 preserve_temp_slots (to_rtx
);
4153 return want_value
? to_rtx
: NULL_RTX
;
4156 /* In case we are returning the contents of an object which overlaps
4157 the place the value is being stored, use a safe function when copying
4158 a value through a pointer into a structure value return block. */
4159 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
4160 && current_function_returns_struct
4161 && !current_function_returns_pcc_struct
)
4166 size
= expr_size (from
);
4167 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4169 if (TARGET_MEM_FUNCTIONS
)
4170 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
4171 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
4172 XEXP (from_rtx
, 0), Pmode
,
4173 convert_to_mode (TYPE_MODE (sizetype
),
4174 size
, TREE_UNSIGNED (sizetype
)),
4175 TYPE_MODE (sizetype
));
4177 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
4178 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
4179 XEXP (to_rtx
, 0), Pmode
,
4180 convert_to_mode (TYPE_MODE (integer_type_node
),
4182 TREE_UNSIGNED (integer_type_node
)),
4183 TYPE_MODE (integer_type_node
));
4185 preserve_temp_slots (to_rtx
);
4188 return want_value
? to_rtx
: NULL_RTX
;
4191 /* Compute FROM and store the value in the rtx we got. */
4194 result
= store_expr (from
, to_rtx
, want_value
);
4195 preserve_temp_slots (result
);
4198 return want_value
? result
: NULL_RTX
;
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     tree exp;
     rtx target;
     int want_value;
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;
4234 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4236 /* Perform first part of compound expression, then assign from second
4238 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
4240 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4242 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4244 /* For conditional expression, get safe form of the target. Then
4245 test the condition, doing the appropriate assignment on either
4246 side. This avoids the creation of unnecessary temporaries.
4247 For non-BLKmode, it is more efficient not to do this. */
4249 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4252 target
= protect_from_queue (target
, 1);
4254 do_pending_stack_adjust ();
4256 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4257 start_cleanup_deferral ();
4258 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
4259 end_cleanup_deferral ();
4261 emit_jump_insn (gen_jump (lab2
));
4264 start_cleanup_deferral ();
4265 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
4266 end_cleanup_deferral ();
      return want_value ? target : NULL_RTX;
4273 else if (queued_subexp_p (target
))
4274 /* If target contains a postincrement, let's not risk
4275 using it as the place to generate the rhs. */
4277 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4279 /* Expand EXP into a new pseudo. */
4280 temp
= gen_reg_rtx (GET_MODE (target
));
4281 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
4284 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
4286 /* If target is volatile, ANSI requires accessing the value
4287 *from* the target, if it is accessed. So make that happen.
4288 In no case return the target itself. */
4289 if (! MEM_VOLATILE_P (target
) && want_value
)
4290 dont_return_target
= 1;
4292 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
4293 && GET_MODE (target
) != BLKmode
)
4294 /* If target is in memory and caller wants value in a register instead,
4295 arrange that. Pass TARGET as target for expand_expr so that,
4296 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4297 We know expand_expr will not use the target in that case.
4298 Don't do this if TARGET is volatile because we are supposed
4299 to write it and then read it. */
4301 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4302 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4304 /* If TEMP is already in the desired TARGET, only copy it from
4305 memory and don't store it there again. */
4307 || (rtx_equal_p (temp
, target
)
4308 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4309 dont_store_target
= 1;
4310 temp
= copy_to_reg (temp
);
4312 dont_return_target
= 1;
4314 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
      /* If this is a scalar in a register that is stored in a wider mode
         than the declared mode, compute the result into its declared mode
         and then convert to the wider mode.  Our value is the computed
         expression.  */
4320 rtx inner_target
= 0;
4322 /* If we don't want a value, we can do the conversion inside EXP,
4323 which will often result in some optimizations. Do the conversion
4324 in two steps: first change the signedness, if needed, then
4325 the extend. But don't do this if the type of EXP is a subtype
4326 of something else since then the conversion might involve
4327 more than just converting modes. */
4328 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4329 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4331 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4332 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4334 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4335 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4337 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4338 (GET_MODE (SUBREG_REG (target
)),
4339 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4342 inner_target
= SUBREG_REG (target
);
4345 temp
= expand_expr (exp
, inner_target
, VOIDmode
, 0);
4347 /* If TEMP is a volatile MEM and we want a result value, make
4348 the access now so it gets done only once. Likewise if
4349 it contains TARGET. */
4350 if (GET_CODE (temp
) == MEM
&& want_value
4351 && (MEM_VOLATILE_P (temp
)
4352 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
4353 temp
= copy_to_reg (temp
);
4355 /* If TEMP is a VOIDmode constant, use convert_modes to make
4356 sure that we properly convert it. */
4357 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4359 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4360 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4361 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4362 GET_MODE (target
), temp
,
4363 SUBREG_PROMOTED_UNSIGNED_P (target
));
4366 convert_move (SUBREG_REG (target
), temp
,
4367 SUBREG_PROMOTED_UNSIGNED_P (target
));
4369 /* If we promoted a constant, change the mode back down to match
4370 target. Otherwise, the caller might get confused by a result whose
4371 mode is larger than expected. */
4373 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
))
4375 if (GET_MODE (temp
) != VOIDmode
)
4377 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4378 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4379 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4380 SUBREG_PROMOTED_UNSIGNED_P (target
));
4383 temp
= convert_modes (GET_MODE (target
),
4384 GET_MODE (SUBREG_REG (target
)),
4385 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
      return want_value ? temp : NULL_RTX;
4392 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4393 /* Return TARGET if it's a specified hardware register.
4394 If TARGET is a volatile mem ref, either return TARGET
4395 or return a reg copied *from* TARGET; ANSI requires this.
4397 Otherwise, if TEMP is not TARGET, return TEMP
4398 if it is constant (for efficiency),
4399 or if we really want the correct value. */
4400 if (!(target
&& GET_CODE (target
) == REG
4401 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4402 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4403 && ! rtx_equal_p (temp
, target
)
4404 && (CONSTANT_P (temp
) || want_value
))
4405 dont_return_target
= 1;
4408 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4409 the same as that of TARGET, adjust the constant. This is needed, for
4410 example, in case it is a CONST_DOUBLE and we want only a word-sized
4412 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4413 && TREE_CODE (exp
) != ERROR_MARK
4414 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4415 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4416 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */
4432 if ((! rtx_equal_p (temp
, target
)
4433 || (temp
!= target
&& (side_effects_p (temp
)
4434 || side_effects_p (target
))))
4435 && TREE_CODE (exp
) != ERROR_MARK
4436 && ! dont_store_target
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
         but TARGET is not a valid memory reference, TEMP will differ
         from TARGET although it is really the same location.  */
4440 && (TREE_CODE_CLASS (TREE_CODE (exp
)) != 'd'
4441 || target
!= DECL_RTL_IF_SET (exp
))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
         unless necessary, because some front ends' (C++) expr_size hook
         aborts on objects that are not supposed to be bit-copied or
         bit-initialized.  */
4446 && expr_size (exp
) != const0_rtx
)
4448 target
= protect_from_queue (target
, 1);
4449 if (GET_MODE (temp
) != GET_MODE (target
)
4450 && GET_MODE (temp
) != VOIDmode
)
4452 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4453 if (dont_return_target
)
4455 /* In this case, we will return TEMP,
4456 so make sure it has the proper mode.
4457 But don't forget to store the value into TARGET. */
4458 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4459 emit_move_insn (target
, temp
);
4462 convert_move (target
, temp
, unsignedp
);
4465 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4467 /* Handle copying a string constant into an array. The string
4468 constant may be shorter than the array. So copy just the string's
4469 actual length, and clear the rest. First get the size of the data
4470 type of the string, which is actually the size of the target. */
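      /* Added illustrative note (not in the original source): for an
         initialization such as

             char buf[10] = "abc";

         the STRING_CST provides 4 bytes ("abc" plus the terminating NUL)
         while the target occupies 10 bytes, so the code below copies the
         smaller of the string length and the target size and then clears
         the remaining 6 bytes.  The name BUF is hypothetical.  */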
4471 rtx size
= expr_size (exp
);
4473 if (GET_CODE (size
) == CONST_INT
4474 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4475 emit_block_move (target
, temp
, size
, BLOCK_OP_NORMAL
);
4478 /* Compute the size of the data to copy from the string. */
4480 = size_binop (MIN_EXPR
,
4481 make_tree (sizetype
, size
),
4482 size_int (TREE_STRING_LENGTH (exp
)));
4483 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
4487 /* Copy that much. */
4488 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
, 0);
4489 emit_block_move (target
, temp
, copy_size_rtx
, BLOCK_OP_NORMAL
);
4491 /* Figure out how much is left in TARGET that we have to clear.
4492 Do all calculations in ptr_mode. */
4493 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4495 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4496 target
= adjust_address (target
, BLKmode
,
4497 INTVAL (copy_size_rtx
));
4501 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4502 copy_size_rtx
, NULL_RTX
, 0,
4505 #ifdef POINTERS_EXTEND_UNSIGNED
4506 if (GET_MODE (copy_size_rtx
) != Pmode
)
4507 copy_size_rtx
= convert_memory_address (Pmode
,
4511 target
= offset_address (target
, copy_size_rtx
,
4512 highest_pow2_factor (copy_size
));
4513 label
= gen_label_rtx ();
4514 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4515 GET_MODE (size
), 0, label
);
4518 if (size
!= const0_rtx
)
4519 clear_storage (target
, size
);
4525 /* Handle calls that return values in multiple non-contiguous locations.
4526 The Irix 6 ABI has examples of this. */
4527 else if (GET_CODE (target
) == PARALLEL
)
4528 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)));
4529 else if (GET_MODE (temp
) == BLKmode
)
4530 emit_block_move (target
, temp
, expr_size (exp
), BLOCK_OP_NORMAL
);
4532 emit_move_insn (target
, temp
);
4535 /* If we don't want a value, return NULL_RTX. */
4539 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4540 ??? The latter test doesn't seem to make sense. */
4541 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4544 /* Return TARGET itself if it is a hard register. */
4545 else if (want_value
&& GET_MODE (target
) != BLKmode
4546 && ! (GET_CODE (target
) == REG
4547 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4548 return copy_to_reg (target
);
4554 /* Return 1 if EXP just contains zeros. */
4562 switch (TREE_CODE (exp
))
4566 case NON_LVALUE_EXPR
:
4567 case VIEW_CONVERT_EXPR
:
4568 return is_zeros_p (TREE_OPERAND (exp
, 0));
4571 return integer_zerop (exp
);
4575 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4578 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
4581 for (elt
= TREE_VECTOR_CST_ELTS (exp
); elt
;
4582 elt
= TREE_CHAIN (elt
))
4583 if (!is_zeros_p (TREE_VALUE (elt
)))
4589 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4590 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4591 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4592 if (! is_zeros_p (TREE_VALUE (elt
)))
4602 /* Return 1 if EXP contains mostly (3/4) zeros. */
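/* Added illustrative note (not in the original source): as a worked
   example of the 3/4 criterion below, a CONSTRUCTOR with 8 elements of
   which 6 are zero satisfies 4 * 6 >= 3 * 8 (24 >= 24), so it counts as
   "mostly zero"; with only 5 zero elements, 20 >= 24 fails and it does
   not.  Callers use this to decide whether clearing the whole object
   first is likely to be cheaper than storing every element.  */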
4605 mostly_zeros_p (exp
)
4608 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4610 int elts
= 0, zeros
= 0;
4611 tree elt
= CONSTRUCTOR_ELTS (exp
);
4612 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4614 /* If there are no ranges of true bits, it is all zero. */
4615 return elt
== NULL_TREE
;
4617 for (; elt
; elt
= TREE_CHAIN (elt
))
4619 /* We do not handle the case where the index is a RANGE_EXPR,
4620 so the statistic will be somewhat inaccurate.
4621 We do make a more accurate count in store_constructor itself,
4622 so since this function is only used for nested array elements,
4623 this should be close enough. */
4624 if (mostly_zeros_p (TREE_VALUE (elt
)))
      return 4 * zeros >= 3 * elts;
4632 return is_zeros_p (exp
);
4635 /* Helper function for store_constructor.
4636 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4637 TYPE is the type of the CONSTRUCTOR, not the element type.
4638 CLEARED is as for store_constructor.
4639 ALIAS_SET is the alias set to use for any stores.
4641 This provides a recursive shortcut back to store_constructor when it isn't
4642 necessary to go through store_field. This is so that we can pass through
4643 the cleared field to let store_constructor know that we may not have to
4644 clear a substructure if the outer structure has already been cleared. */
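/* Added illustrative note (not in the original source): the CLEARED
   shortcut matters for nested aggregate initializers.  Assuming a C
   initializer such as

       struct { struct { int a, b; } u; int v; } x = { { 1 } };

   the outer constructor supplies fewer fields than the structure, so
   the whole of X is cleared first; passing CLEARED down then lets the
   recursive store_constructor call for "u" skip a second, redundant
   clear before storing "a".  The example and its names are
   hypothetical.  */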
4647 store_constructor_field (target
, bitsize
, bitpos
, mode
, exp
, type
, cleared
,
4650 unsigned HOST_WIDE_INT bitsize
;
4651 HOST_WIDE_INT bitpos
;
4652 enum machine_mode mode
;
4657 if (TREE_CODE (exp
) == CONSTRUCTOR
4658 && bitpos
% BITS_PER_UNIT
== 0
4659 /* If we have a nonzero bitpos for a register target, then we just
4660 let store_field do the bitfield handling. This is unlikely to
4661 generate unnecessary clear instructions anyways. */
4662 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4664 if (GET_CODE (target
) == MEM
)
4666 = adjust_address (target
,
4667 GET_MODE (target
) == BLKmode
4669 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4670 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4673 /* Update the alias set, if required. */
4674 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4675 && MEM_ALIAS_SET (target
) != 0)
4677 target
= copy_rtx (target
);
4678 set_mem_alias_set (target
, alias_set
);
4681 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
4684 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
4688 /* Store the value of constructor EXP into the rtx TARGET.
4689 TARGET is either a REG or a MEM; we know it cannot conflict, since
4690 safe_from_p has been called.
4691 CLEARED is true if TARGET is known to have been zero'd.
4692 SIZE is the number of bytes of TARGET we are allowed to modify: this
4693 may not be the same as the size of EXP if we are assigning to a field
4694 which has been packed to exclude padding bits. */
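/* Added illustrative note (not in the original source): as a sketch of
   the strategy below, assume the C initializer

       int a[8] = { 1 };

   The constructor has fewer elements than the array (and is mostly
   zero), so the code first clears all of A with clear_storage and then
   stores only the nonzero element, rather than emitting eight separate
   stores.  The example is hypothetical.  */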
4697 store_constructor (exp
, target
, cleared
, size
)
4703 tree type
= TREE_TYPE (exp
);
4704 #ifdef WORD_REGISTER_OPERATIONS
4705 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4708 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4709 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4713 /* We either clear the aggregate or indicate the value is dead. */
4714 if ((TREE_CODE (type
) == UNION_TYPE
4715 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4717 && ! CONSTRUCTOR_ELTS (exp
))
4718 /* If the constructor is empty, clear the union. */
4720 clear_storage (target
, expr_size (exp
));
4724 /* If we are building a static constructor into a register,
4725 set the initial value as zero so we can fold the value into
4726 a constant. But if more than one register is involved,
4727 this probably loses. */
4728 else if (! cleared
&& GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4729 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4731 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4735 /* If the constructor has fewer fields than the structure
4736 or if we are initializing the structure to mostly zeros,
4737 clear the whole structure first. Don't do this if TARGET is a
4738 register whose mode size isn't equal to SIZE since clear_storage
4739 can't handle this case. */
4740 else if (! cleared
&& size
> 0
4741 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4742 != fields_length (type
))
4743 || mostly_zeros_p (exp
))
4744 && (GET_CODE (target
) != REG
4745 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4748 clear_storage (target
, GEN_INT (size
));
4753 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4755 /* Store each element of the constructor into
4756 the corresponding field of TARGET. */
4758 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4760 tree field
= TREE_PURPOSE (elt
);
4761 tree value
= TREE_VALUE (elt
);
4762 enum machine_mode mode
;
4763 HOST_WIDE_INT bitsize
;
4764 HOST_WIDE_INT bitpos
= 0;
4767 rtx to_rtx
= target
;
4769 /* Just ignore missing fields.
4770 We cleared the whole structure, above,
4771 if any fields are missing. */
4775 if (cleared
&& is_zeros_p (value
))
4778 if (host_integerp (DECL_SIZE (field
), 1))
4779 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4783 unsignedp
= TREE_UNSIGNED (field
);
4784 mode
= DECL_MODE (field
);
4785 if (DECL_BIT_FIELD (field
))
4788 offset
= DECL_FIELD_OFFSET (field
);
4789 if (host_integerp (offset
, 0)
4790 && host_integerp (bit_position (field
), 0))
4792 bitpos
= int_bit_position (field
);
4796 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4802 if (contains_placeholder_p (offset
))
4803 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4804 offset
, make_tree (TREE_TYPE (exp
), target
));
4806 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4807 if (GET_CODE (to_rtx
) != MEM
)
4810 #ifdef POINTERS_EXTEND_UNSIGNED
4811 if (GET_MODE (offset_rtx
) != Pmode
)
4812 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4814 if (GET_MODE (offset_rtx
) != ptr_mode
)
4815 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4818 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4819 highest_pow2_factor (offset
));
4822 if (TREE_READONLY (field
))
4824 if (GET_CODE (to_rtx
) == MEM
)
4825 to_rtx
= copy_rtx (to_rtx
);
4827 RTX_UNCHANGING_P (to_rtx
) = 1;
4830 #ifdef WORD_REGISTER_OPERATIONS
4831 /* If this initializes a field that is smaller than a word, at the
4832 start of a word, try to widen it to a full word.
4833 This special case allows us to output C++ member function
4834 initializations in a form that the optimizers can understand. */
4835 if (GET_CODE (target
) == REG
4836 && bitsize
< BITS_PER_WORD
4837 && bitpos
% BITS_PER_WORD
== 0
4838 && GET_MODE_CLASS (mode
) == MODE_INT
4839 && TREE_CODE (value
) == INTEGER_CST
4841 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4843 tree type
= TREE_TYPE (value
);
4845 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4847 type
= (*lang_hooks
.types
.type_for_size
)
4848 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4849 value
= convert (type
, value
);
4852 if (BYTES_BIG_ENDIAN
)
4854 = fold (build (LSHIFT_EXPR
, type
, value
,
4855 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4856 bitsize
= BITS_PER_WORD
;
4861 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4862 && DECL_NONADDRESSABLE_P (field
))
4864 to_rtx
= copy_rtx (to_rtx
);
4865 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4868 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4869 value
, type
, cleared
,
4870 get_alias_set (TREE_TYPE (field
)));
4873 else if (TREE_CODE (type
) == ARRAY_TYPE
4874 || TREE_CODE (type
) == VECTOR_TYPE
)
4879 tree domain
= TYPE_DOMAIN (type
);
4880 tree elttype
= TREE_TYPE (type
);
4882 HOST_WIDE_INT minelt
= 0;
4883 HOST_WIDE_INT maxelt
= 0;
4885 /* Vectors are like arrays, but the domain is stored via an array
4887 if (TREE_CODE (type
) == VECTOR_TYPE
)
4889 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4890 the same field as TYPE_DOMAIN, we are not guaranteed that
4892 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
4893 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4896 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4897 && TYPE_MAX_VALUE (domain
)
4898 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4899 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4901 /* If we have constant bounds for the range of the type, get them. */
4904 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4905 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is a
         static constructor of a non-BLKmode object.  */
4911 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4915 HOST_WIDE_INT count
= 0, zero_count
= 0;
4916 need_to_clear
= ! const_bounds_p
;
4918 /* This loop is a more accurate version of the loop in
4919 mostly_zeros_p (it handles RANGE_EXPR in an index).
4920 It is also needed to check for missing elements. */
4921 for (elt
= CONSTRUCTOR_ELTS (exp
);
4922 elt
!= NULL_TREE
&& ! need_to_clear
;
4923 elt
= TREE_CHAIN (elt
))
4925 tree index
= TREE_PURPOSE (elt
);
4926 HOST_WIDE_INT this_node_count
;
4928 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4930 tree lo_index
= TREE_OPERAND (index
, 0);
4931 tree hi_index
= TREE_OPERAND (index
, 1);
4933 if (! host_integerp (lo_index
, 1)
4934 || ! host_integerp (hi_index
, 1))
4940 this_node_count
= (tree_low_cst (hi_index
, 1)
4941 - tree_low_cst (lo_index
, 1) + 1);
4944 this_node_count
= 1;
4946 count
+= this_node_count
;
4947 if (mostly_zeros_p (TREE_VALUE (elt
)))
4948 zero_count
+= this_node_count
;
4951 /* Clear the entire array first if there are any missing elements,
4952 or if the incidence of zero elements is >= 75%. */
4954 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4958 if (need_to_clear
&& size
> 0)
4963 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4965 clear_storage (target
, GEN_INT (size
));
4969 else if (REG_P (target
))
4970 /* Inform later passes that the old value is dead. */
4971 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4973 /* Store each element of the constructor into
4974 the corresponding element of TARGET, determined
4975 by counting the elements. */
4976 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4978 elt
= TREE_CHAIN (elt
), i
++)
4980 enum machine_mode mode
;
4981 HOST_WIDE_INT bitsize
;
4982 HOST_WIDE_INT bitpos
;
4984 tree value
= TREE_VALUE (elt
);
4985 tree index
= TREE_PURPOSE (elt
);
4986 rtx xtarget
= target
;
4988 if (cleared
&& is_zeros_p (value
))
4991 unsignedp
= TREE_UNSIGNED (elttype
);
4992 mode
= TYPE_MODE (elttype
);
4993 if (mode
== BLKmode
)
4994 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4995 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4998 bitsize
= GET_MODE_BITSIZE (mode
);
5000 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
5002 tree lo_index
= TREE_OPERAND (index
, 0);
5003 tree hi_index
= TREE_OPERAND (index
, 1);
5004 rtx index_r
, pos_rtx
, hi_r
, loop_top
, loop_end
;
5005 struct nesting
*loop
;
5006 HOST_WIDE_INT lo
, hi
, count
;
5009 /* If the range is constant and "small", unroll the loop. */
5011 && host_integerp (lo_index
, 0)
5012 && host_integerp (hi_index
, 0)
5013 && (lo
= tree_low_cst (lo_index
, 0),
5014 hi
= tree_low_cst (hi_index
, 0),
5015 count
= hi
- lo
+ 1,
5016 (GET_CODE (target
) != MEM
5018 || (host_integerp (TYPE_SIZE (elttype
), 1)
5019 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
5022 lo
-= minelt
; hi
-= minelt
;
5023 for (; lo
<= hi
; lo
++)
5025 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
5027 if (GET_CODE (target
) == MEM
5028 && !MEM_KEEP_ALIAS_SET_P (target
)
5029 && TREE_CODE (type
) == ARRAY_TYPE
5030 && TYPE_NONALIASED_COMPONENT (type
))
5032 target
= copy_rtx (target
);
5033 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5036 store_constructor_field
5037 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
5038 get_alias_set (elttype
));
5043 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
5044 loop_top
= gen_label_rtx ();
5045 loop_end
= gen_label_rtx ();
5047 unsignedp
= TREE_UNSIGNED (domain
);
5049 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
5052 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
5054 SET_DECL_RTL (index
, index_r
);
5055 if (TREE_CODE (value
) == SAVE_EXPR
5056 && SAVE_EXPR_RTL (value
) == 0)
5058 /* Make sure value gets expanded once before the
5060 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
5063 store_expr (lo_index
, index_r
, 0);
5064 loop
= expand_start_loop (0);
5066 /* Assign value to element index. */
5068 = convert (ssizetype
,
5069 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5070 index
, TYPE_MIN_VALUE (domain
))));
5071 position
= size_binop (MULT_EXPR
, position
,
5073 TYPE_SIZE_UNIT (elttype
)));
5075 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
5076 xtarget
= offset_address (target
, pos_rtx
,
5077 highest_pow2_factor (position
));
5078 xtarget
= adjust_address (xtarget
, mode
, 0);
5079 if (TREE_CODE (value
) == CONSTRUCTOR
)
5080 store_constructor (value
, xtarget
, cleared
,
5081 bitsize
/ BITS_PER_UNIT
);
5083 store_expr (value
, xtarget
, 0);
5085 expand_exit_loop_if_false (loop
,
5086 build (LT_EXPR
, integer_type_node
,
5089 expand_increment (build (PREINCREMENT_EXPR
,
5091 index
, integer_one_node
), 0, 0);
5093 emit_label (loop_end
);
5096 else if ((index
!= 0 && ! host_integerp (index
, 0))
5097 || ! host_integerp (TYPE_SIZE (elttype
), 1))
5102 index
= ssize_int (1);
5105 index
= convert (ssizetype
,
5106 fold (build (MINUS_EXPR
, index
,
5107 TYPE_MIN_VALUE (domain
))));
5109 position
= size_binop (MULT_EXPR
, index
,
5111 TYPE_SIZE_UNIT (elttype
)));
5112 xtarget
= offset_address (target
,
5113 expand_expr (position
, 0, VOIDmode
, 0),
5114 highest_pow2_factor (position
));
5115 xtarget
= adjust_address (xtarget
, mode
, 0);
5116 store_expr (value
, xtarget
, 0);
5121 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
5122 * tree_low_cst (TYPE_SIZE (elttype
), 1));
5124 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
5126 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
5127 && TREE_CODE (type
) == ARRAY_TYPE
5128 && TYPE_NONALIASED_COMPONENT (type
))
5130 target
= copy_rtx (target
);
5131 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5134 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
5135 type
, cleared
, get_alias_set (elttype
));
5141 /* Set constructor assignments. */
5142 else if (TREE_CODE (type
) == SET_TYPE
)
5144 tree elt
= CONSTRUCTOR_ELTS (exp
);
5145 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
5146 tree domain
= TYPE_DOMAIN (type
);
5147 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the whole set (using bzero/memset) and
         then set only the bits we want.  */
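      /* Added illustrative note (not in the original source): SET_TYPE
         constructors come from languages with set types (e.g. a
         Pascal-style "set of 0..255").  As a sketch, a mostly-constant
         set such as {1, 3..10} would have its constant bits assembled
         into words and stored directly below, while a non-constant
         range such as {lo..hi} would be handled afterwards by the
         __setbits library call or, when byte-aligned, by memset.  The
         concrete set values are hypothetical.  */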
5159 /* Check for all zeros. */
5160 if (elt
== NULL_TREE
&& size
> 0)
5163 clear_storage (target
, GEN_INT (size
));
5167 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
5168 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
5169 bitlength
= size_binop (PLUS_EXPR
,
5170 size_diffop (domain_max
, domain_min
),
5173 nbits
= tree_low_cst (bitlength
, 1);
5175 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5176 are "complicated" (more than one range), initialize (the
5177 constant parts) by copying from a constant. */
5178 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5179 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5181 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5182 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5183 char *bit_buffer
= (char *) alloca (nbits
);
5184 HOST_WIDE_INT word
= 0;
5185 unsigned int bit_pos
= 0;
5186 unsigned int ibit
= 0;
5187 unsigned int offset
= 0; /* In bytes from beginning of set. */
5189 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5192 if (bit_buffer
[ibit
])
5194 if (BYTES_BIG_ENDIAN
)
5195 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5197 word
|= 1 << bit_pos
;
5201 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5203 if (word
!= 0 || ! cleared
)
5205 rtx datum
= GEN_INT (word
);
5208 /* The assumption here is that it is safe to use
5209 XEXP if the set is multi-word, but not if
5210 it's single-word. */
5211 if (GET_CODE (target
) == MEM
)
5212 to_rtx
= adjust_address (target
, mode
, offset
);
5213 else if (offset
== 0)
5217 emit_move_insn (to_rtx
, datum
);
5224 offset
+= set_word_size
/ BITS_PER_UNIT
;
5229 /* Don't bother clearing storage if the set is all ones. */
5230 if (TREE_CHAIN (elt
) != NULL_TREE
5231 || (TREE_PURPOSE (elt
) == NULL_TREE
5233 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5234 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5235 || (tree_low_cst (TREE_VALUE (elt
), 0)
5236 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5237 != (HOST_WIDE_INT
) nbits
))))
5238 clear_storage (target
, expr_size (exp
));
5240 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5242 /* Start of range of element or NULL. */
5243 tree startbit
= TREE_PURPOSE (elt
);
5244 /* End of range of element, or element value. */
5245 tree endbit
= TREE_VALUE (elt
);
5246 HOST_WIDE_INT startb
, endb
;
5247 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5249 bitlength_rtx
= expand_expr (bitlength
,
5250 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5252 /* Handle non-range tuple element like [ expr ]. */
5253 if (startbit
== NULL_TREE
)
5255 startbit
= save_expr (endbit
);
5259 startbit
= convert (sizetype
, startbit
);
5260 endbit
= convert (sizetype
, endbit
);
5261 if (! integer_zerop (domain_min
))
5263 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5264 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5266 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5267 EXPAND_CONST_ADDRESS
);
5268 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5269 EXPAND_CONST_ADDRESS
);
5275 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
5276 (GET_MODE (target
), 0),
5279 emit_move_insn (targetx
, target
);
5282 else if (GET_CODE (target
) == MEM
)
5287 /* Optimization: If startbit and endbit are constants divisible
5288 by BITS_PER_UNIT, call memset instead. */
5289 if (TARGET_MEM_FUNCTIONS
5290 && TREE_CODE (startbit
) == INTEGER_CST
5291 && TREE_CODE (endbit
) == INTEGER_CST
5292 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5293 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5295 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5297 plus_constant (XEXP (targetx
, 0),
5298 startb
/ BITS_PER_UNIT
),
5300 constm1_rtx
, TYPE_MODE (integer_type_node
),
5301 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5302 TYPE_MODE (sizetype
));
5305 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
5306 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
5307 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5308 startbit_rtx
, TYPE_MODE (sizetype
),
5309 endbit_rtx
, TYPE_MODE (sizetype
));
5312 emit_move_insn (target
, targetx
);
5320 /* Store the value of EXP (an expression tree)
5321 into a subfield of TARGET which has mode MODE and occupies
5322 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5323 If MODE is VOIDmode, it means that we are storing into a bit-field.
5325 If VALUE_MODE is VOIDmode, return nothing in particular.
5326 UNSIGNEDP is not used in this case.
5328 Otherwise, return an rtx for the value stored. This rtx
5329 has mode VALUE_MODE if that is convenient to do.
5330 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5332 TYPE is the type of the underlying object,
5334 ALIAS_SET is the alias set for the destination. This value will
5335 (in general) be different from that for TARGET, since TARGET is a
5336 reference to the containing structure. */
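/* Added illustrative note (not in the original source): as a sketch,
   for an assignment to a C bit-field member

       struct { unsigned f : 3; } s;  s.f = v;

   store_field is reached with BITSIZE == 3, BITPOS the field's bit
   offset, and MODE == VOIDmode, so the value is written with
   store_bit_field rather than an ordinary memory store.  The names S,
   F and V are hypothetical.  */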
5339 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
, unsignedp
, type
,
5342 HOST_WIDE_INT bitsize
;
5343 HOST_WIDE_INT bitpos
;
5344 enum machine_mode mode
;
5346 enum machine_mode value_mode
;
5351 HOST_WIDE_INT width_mask
= 0;
5353 if (TREE_CODE (exp
) == ERROR_MARK
)
  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5363 /* If we are storing into an unaligned field of an aligned union that is
5364 in a register, we may have the mode of TARGET being an integer mode but
5365 MODE == BLKmode. In that case, get an aligned object whose size and
5366 alignment are the same as TARGET and store TARGET into it (we can avoid
5367 the store if the field being stored is the entire width of TARGET). Then
5368 call ourselves recursively to store the field into a BLKmode version of
5369 that object. Finally, load from the object into TARGET. This is not
5370 very efficient in general, but should only be slightly more expensive
5371 than the otherwise-required unaligned accesses. Perhaps this can be
5372 cleaned up later. */
5375 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5379 (build_qualified_type (type
, TYPE_QUALS (type
) | TYPE_QUAL_CONST
),
5381 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5383 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5384 emit_move_insn (object
, target
);
5386 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5389 emit_move_insn (target
, object
);
5391 /* We want to return the BLKmode version of the data. */
5395 if (GET_CODE (target
) == CONCAT
)
5397 /* We're storing into a struct containing a single __complex. */
5401 return store_expr (exp
, target
, 0);
5404 /* If the structure is in a register or if the component
5405 is a bit field, we cannot use addressing to access it.
5406 Use bit-field techniques or SUBREG to store in it. */
5408 if (mode
== VOIDmode
5409 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5410 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5411 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5412 || GET_CODE (target
) == REG
5413 || GET_CODE (target
) == SUBREG
5414 /* If the field isn't aligned enough to store as an ordinary memref,
5415 store it as a bit field. */
5416 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
5417 && (MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
)
5418 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
5419 /* If the RHS and field are a constant size and the size of the
5420 RHS isn't the same size as the bitfield, we must use bitfield
5423 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5424 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5426 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         a big-endian machine, we want the upper BITSIZE bits.  */
5432 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5433 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5434 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5435 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5436 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5440 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5442 if (mode
!= VOIDmode
&& mode
!= BLKmode
5443 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5444 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5446 /* If the modes of TARGET and TEMP are both BLKmode, both
5447 must be in memory and BITPOS must be aligned on a byte
5448 boundary. If so, we simply do a block copy. */
5449 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5451 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5452 || bitpos
% BITS_PER_UNIT
!= 0)
5455 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5456 emit_block_move (target
, temp
,
5457 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
          return value_mode == VOIDmode ? const0_rtx : target;
5464 /* Store the value in the bitfield. */
5465 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5466 int_size_in_bytes (type
));
5468 if (value_mode
!= VOIDmode
)
5470 /* The caller wants an rtx for the value.
5471 If possible, avoid refetching from the bitfield itself. */
5473 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5476 enum machine_mode tmode
;
5478 tmode
= GET_MODE (temp
);
5479 if (tmode
== VOIDmode
)
5483 return expand_and (tmode
, temp
,
5484 gen_int_mode (width_mask
, tmode
),
5487 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5488 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5489 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5492 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5493 NULL_RTX
, value_mode
, VOIDmode
,
5494 int_size_in_bytes (type
));
5500 rtx addr
= XEXP (target
, 0);
5501 rtx to_rtx
= target
;
5503 /* If a value is wanted, it must be the lhs;
5504 so make the address stable for multiple use. */
5506 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5507 && ! CONSTANT_ADDRESS_P (addr
)
5508 /* A frame-pointer reference is already stable. */
5509 && ! (GET_CODE (addr
) == PLUS
5510 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5511 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5512 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5513 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5515 /* Now build a reference to just the desired component. */
5517 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5519 if (to_rtx
== target
)
5520 to_rtx
= copy_rtx (to_rtx
);
5522 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5523 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5524 set_mem_alias_set (to_rtx
, alias_set
);
5526 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5530 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5531 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5532 codes and find the ultimate containing object, which we return.
5534 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5535 bit position, and *PUNSIGNEDP to the signedness of the field.
5536 If the position of the field is variable, we store a tree
5537 giving the variable offset (in units) in *POFFSET.
5538 This offset is in addition to the bit position.
5539 If the position is not variable, we store 0 in *POFFSET.
5541 If any of the extraction expressions is volatile,
5542 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5544 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5545 is a mode that can be used to access the field. In that case, *PBITSIZE
5548 If the field describes a variable-sized object, *PMODE is set to
5549 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5550 this case, but the address of the object can be found. */
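/* Added illustrative note (not in the original source): as a sketch,
   for a reference such as "s.a[i].b" this routine peels the
   COMPONENT_REFs and ARRAY_REF and returns the innermost object "s";
   the constant part of the position ends up in *PBITPOS and the
   variable part (roughly "i" scaled by the element size, in units) in
   *POFFSET.  The expression is hypothetical.  */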
5553 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5554 punsignedp
, pvolatilep
)
5556 HOST_WIDE_INT
*pbitsize
;
5557 HOST_WIDE_INT
*pbitpos
;
5559 enum machine_mode
*pmode
;
5564 enum machine_mode mode
= VOIDmode
;
5565 tree offset
= size_zero_node
;
5566 tree bit_offset
= bitsize_zero_node
;
5567 tree placeholder_ptr
= 0;
5570 /* First get the mode, signedness, and size. We do this from just the
5571 outermost expression. */
5572 if (TREE_CODE (exp
) == COMPONENT_REF
)
5574 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5575 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5576 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5578 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5580 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5582 size_tree
= TREE_OPERAND (exp
, 1);
5583 *punsignedp
= TREE_UNSIGNED (exp
);
5587 mode
= TYPE_MODE (TREE_TYPE (exp
));
5588 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5590 if (mode
== BLKmode
)
5591 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5593 *pbitsize
= GET_MODE_BITSIZE (mode
);
5598 if (! host_integerp (size_tree
, 1))
5599 mode
= BLKmode
, *pbitsize
= -1;
5601 *pbitsize
= tree_low_cst (size_tree
, 1);
5604 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5605 and find the ultimate containing object. */
5608 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5609 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5610 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5612 tree field
= TREE_OPERAND (exp
, 1);
5613 tree this_offset
= DECL_FIELD_OFFSET (field
);
5615 /* If this field hasn't been filled in yet, don't go
5616 past it. This should only happen when folding expressions
5617 made during type construction. */
5618 if (this_offset
== 0)
5620 else if (! TREE_CONSTANT (this_offset
)
5621 && contains_placeholder_p (this_offset
))
5622 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5624 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5625 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5626 DECL_FIELD_BIT_OFFSET (field
));
5628 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5631 else if (TREE_CODE (exp
) == ARRAY_REF
5632 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5634 tree index
= TREE_OPERAND (exp
, 1);
5635 tree array
= TREE_OPERAND (exp
, 0);
5636 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5637 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5638 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5640 /* We assume all arrays have sizes that are a multiple of a byte.
5641 First subtract the lower bound, if any, in the type of the
5642 index, then convert to sizetype and multiply by the size of the
5644 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5645 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
          /* If the index has a self-referential type, pass it to a
             WITH_RECORD_EXPR; if the component size does too, pass our
             component to one.  */
5651 if (! TREE_CONSTANT (index
)
5652 && contains_placeholder_p (index
))
5653 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5654 if (! TREE_CONSTANT (unit_size
)
5655 && contains_placeholder_p (unit_size
))
5656 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5658 offset
= size_binop (PLUS_EXPR
, offset
,
5659 size_binop (MULT_EXPR
,
5660 convert (sizetype
, index
),
5664 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5666 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5668 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5669 We might have been called from tree optimization where we
5670 haven't set up an object yet. */
5678 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5679 && TREE_CODE (exp
) != VIEW_CONVERT_EXPR
5680 && ! ((TREE_CODE (exp
) == NOP_EXPR
5681 || TREE_CODE (exp
) == CONVERT_EXPR
)
5682 && (TYPE_MODE (TREE_TYPE (exp
))
5683 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5686 /* If any reference in the chain is volatile, the effect is volatile. */
5687 if (TREE_THIS_VOLATILE (exp
))
5690 exp
= TREE_OPERAND (exp
, 0);
5693 /* If OFFSET is constant, see if we can return the whole thing as a
5694 constant bit position. Otherwise, split it up. */
5695 if (host_integerp (offset
, 0)
5696 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5698 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5699 && host_integerp (tem
, 0))
5700 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5702 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5708 /* Return 1 if T is an expression that get_inner_reference handles. */
5711 handled_component_p (t
)
5714 switch (TREE_CODE (t
))
5719 case ARRAY_RANGE_REF
:
5720 case NON_LVALUE_EXPR
:
5721 case VIEW_CONVERT_EXPR
:
5726 return (TYPE_MODE (TREE_TYPE (t
))
5727 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
5734 /* Given an rtx VALUE that may contain additions and multiplications, return
5735 an equivalent value that just refers to a register, memory, or constant.
5736 This is done by generating instructions to perform the arithmetic and
5737 returning a pseudo-register containing the value.
5739 The returned value may be a REG, SUBREG, MEM or constant. */
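/* Added illustrative note (not in the original source): a minimal usage
   sketch, with hypothetical operands:

       rtx sum = force_operand (gen_rtx_PLUS (SImode, reg, GEN_INT (4)),
                                NULL_RTX);

   This emits the addition (via expand_simple_binop) and returns an
   operand suitable for use in an instruction, typically a pseudo
   register.  */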
5742 force_operand (value
, target
)
5746 /* Use subtarget as the target for operand 0 of a binary operation. */
5747 rtx subtarget
= get_subtarget (target
);
5748 enum rtx_code code
= GET_CODE (value
);
5750 /* Check for a PIC address load. */
5751 if ((code
== PLUS
|| code
== MINUS
)
5752 && XEXP (value
, 0) == pic_offset_table_rtx
5753 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5754 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5755 || GET_CODE (XEXP (value
, 1)) == CONST
))
5758 subtarget
= gen_reg_rtx (GET_MODE (value
));
5759 emit_move_insn (subtarget
, value
);
5763 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5766 target
= gen_reg_rtx (GET_MODE (value
));
5767 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5768 code
== ZERO_EXTEND
);
5772 if (GET_RTX_CLASS (code
) == '2' || GET_RTX_CLASS (code
) == 'c')
5774 op2
= XEXP (value
, 1);
5775 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5777 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5780 op2
= negate_rtx (GET_MODE (value
), op2
);
5783 /* Check for an addition with OP2 a constant integer and our first
5784 operand a PLUS of a virtual register and something else. In that
5785 case, we want to emit the sum of the virtual register and the
5786 constant first and then add the other value. This allows virtual
5787 register instantiation to simply modify the constant rather than
5788 creating another one around this addition. */
5789 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5790 && GET_CODE (XEXP (value
, 0)) == PLUS
5791 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5792 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5793 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5795 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5796 XEXP (XEXP (value
, 0), 0), op2
,
5797 subtarget
, 0, OPTAB_LIB_WIDEN
);
5798 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5799 force_operand (XEXP (XEXP (value
,
5801 target
, 0, OPTAB_LIB_WIDEN
);
5804 op1
= force_operand (XEXP (value
, 0), subtarget
);
5805 op2
= force_operand (op2
, NULL_RTX
);
5809 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5811 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5812 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5813 target
, 1, OPTAB_LIB_WIDEN
);
5815 return expand_divmod (0,
5816 FLOAT_MODE_P (GET_MODE (value
))
5817 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5818 GET_MODE (value
), op1
, op2
, target
, 0);
5821 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5825 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5829 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5833 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5834 target
, 0, OPTAB_LIB_WIDEN
);
5837 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5838 target
, 1, OPTAB_LIB_WIDEN
);
5841 if (GET_RTX_CLASS (code
) == '1')
5843 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5844 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5847 #ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references
     to be explicit, so we need to deal with such paradoxical SUBREGs.  */
5850 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5851 && (GET_MODE_SIZE (GET_MODE (value
))
5852 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5854 = simplify_gen_subreg (GET_MODE (value
),
5855 force_reg (GET_MODE (SUBREG_REG (value
)),
5856 force_operand (SUBREG_REG (value
),
5858 GET_MODE (SUBREG_REG (value
)),
5859 SUBREG_BYTE (value
));
5865 /* Subroutine of expand_expr: return nonzero iff there is no way that
5866 EXP can reference X, which is being modified. TOP_P is nonzero if this
5867 call is going to be used to determine whether we need a temporary
5868 for EXP, as opposed to a recursive call to this function.
5870 It is always safe for this routine to return zero since it merely
5871 searches for optimization opportunities. */
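/* Added illustrative note (not in the original source): a typical use is
   deciding whether a source expression may be expanded directly into the
   location being modified.  As a sketch, when the source of an assignment
   may reference the destination X (say both name parts of the same
   structure), the caller asks safe_from_p (X, source, 1); a zero answer
   makes it evaluate the source into a temporary first, which is always
   correct and costs at most an extra temporary.  */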
5874 safe_from_p (x
, exp
, top_p
)
5881 static tree save_expr_list
;
5884 /* If EXP has varying size, we MUST use a target since we currently
5885 have no way of allocating temporaries of variable size
5886 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5887 So we assume here that something at a higher level has prevented a
5888 clash. This is somewhat bogus, but the best we can do. Only
5889 do this when X is BLKmode and when we are at the top level. */
5890 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5891 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5892 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5893 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5894 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5896 && GET_MODE (x
) == BLKmode
)
5897 /* If X is in the outgoing argument area, it is always safe. */
5898 || (GET_CODE (x
) == MEM
5899 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5900 || (GET_CODE (XEXP (x
, 0)) == PLUS
5901 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5904 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5905 find the underlying pseudo. */
5906 if (GET_CODE (x
) == SUBREG
)
5909 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5913 /* A SAVE_EXPR might appear many times in the expression passed to the
5914 top-level safe_from_p call, and if it has a complex subexpression,
5915 examining it multiple times could result in a combinatorial explosion.
5916 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5917 with optimization took about 28 minutes to compile -- even though it was
5918 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5919 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5920 we have processed. Note that the only test of top_p was above. */
5929 rtn
= safe_from_p (x
, exp
, 0);
5931 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5932 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5937 /* Now look at our tree code and possibly recurse. */
5938 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5941 exp_rtl
= DECL_RTL_IF_SET (exp
);
5948 if (TREE_CODE (exp
) == TREE_LIST
)
5949 return ((TREE_VALUE (exp
) == 0
5950 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5951 && (TREE_CHAIN (exp
) == 0
5952 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5953 else if (TREE_CODE (exp
) == ERROR_MARK
)
5954 return 1; /* An already-visited SAVE_EXPR? */
5959 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5963 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5964 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5968 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5969 the expression. If it is set, we conflict iff we are that rtx or
5970 both are in memory. Otherwise, we check all operands of the
5971 expression recursively. */
5973 switch (TREE_CODE (exp
))
5976 /* If the operand is static or we are static, we can't conflict.
5977 Likewise if we don't conflict with the operand at all. */
5978 if (staticp (TREE_OPERAND (exp
, 0))
5979 || TREE_STATIC (exp
)
5980 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
      /* Otherwise, the only way this can conflict is if we are taking
         the address of a DECL whose address is part of X, which is
         very rare.  */
      exp = TREE_OPERAND (exp, 0);
5989 if (!DECL_RTL_SET_P (exp
)
5990 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5993 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5998 if (GET_CODE (x
) == MEM
5999 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
6000 get_alias_set (exp
)))
6005 /* Assume that the call will clobber all hard registers and
6007 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6008 || GET_CODE (x
) == MEM
)
6013 /* If a sequence exists, we would have to scan every instruction
6014 in the sequence to see if it was safe. This is probably not
6016 if (RTL_EXPR_SEQUENCE (exp
))
6019 exp_rtl
= RTL_EXPR_RTL (exp
);
6022 case WITH_CLEANUP_EXPR
:
6023 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
6026 case CLEANUP_POINT_EXPR
:
6027 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
6030 exp_rtl
= SAVE_EXPR_RTL (exp
);
6034 /* If we've already scanned this, don't do it again. Otherwise,
6035 show we've scanned it and record for clearing the flag if we're
6037 if (TREE_PRIVATE (exp
))
6040 TREE_PRIVATE (exp
) = 1;
6041 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
6043 TREE_PRIVATE (exp
) = 0;
6047 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
6051 /* The only operand we look at is operand 1. The rest aren't
6052 part of the expression. */
6053 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
6055 case METHOD_CALL_EXPR
:
6056 /* This takes an rtx argument, but shouldn't appear here. */
6063 /* If we have an rtx, we do not need to scan our operands. */
6067 nops
= first_rtl_op (TREE_CODE (exp
));
6068 for (i
= 0; i
< nops
; i
++)
6069 if (TREE_OPERAND (exp
, i
) != 0
6070 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
6073 /* If this is a language-specific tree code, it may require
6074 special handling. */
6075 if ((unsigned int) TREE_CODE (exp
)
6076 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6077 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
6081 /* If we have an rtl, find any enclosed object. Then see if we conflict
6085 if (GET_CODE (exp_rtl
) == SUBREG
)
6087 exp_rtl
= SUBREG_REG (exp_rtl
);
6088 if (GET_CODE (exp_rtl
) == REG
6089 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
6093 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6094 are memory and they conflict. */
6095 return ! (rtx_equal_p (x
, exp_rtl
)
6096 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
6097 && true_dependence (exp_rtl
, VOIDmode
, x
,
6098 rtx_addr_varies_p
)));
6101 /* If we reach here, it is safe. */
6105 /* Subroutine of expand_expr: return rtx if EXP is a
6106 variable or parameter; else return 0. */
6113 switch (TREE_CODE (exp
))
6117 return DECL_RTL (exp
);
6123 #ifdef MAX_INTEGER_COMPUTATION_MODE
6126 check_max_integer_computation_mode (exp
)
6129 enum tree_code code
;
6130 enum machine_mode mode
;
6132 /* Strip any NOPs that don't change the mode. */
6134 code
= TREE_CODE (exp
);
6136 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6137 if (code
== NOP_EXPR
6138 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
6141 /* First check the type of the overall operation. We need only look at
6142 unary, binary and relational operations. */
6143 if (TREE_CODE_CLASS (code
) == '1'
6144 || TREE_CODE_CLASS (code
) == '2'
6145 || TREE_CODE_CLASS (code
) == '<')
6147 mode
= TYPE_MODE (TREE_TYPE (exp
));
6148 if (GET_MODE_CLASS (mode
) == MODE_INT
6149 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6150 internal_error ("unsupported wide integer operation");
6153 /* Check operand of a unary op. */
6154 if (TREE_CODE_CLASS (code
) == '1')
6156 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6157 if (GET_MODE_CLASS (mode
) == MODE_INT
6158 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6159 internal_error ("unsupported wide integer operation");
6162 /* Check operands of a binary/comparison op. */
6163 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
6165 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6166 if (GET_MODE_CLASS (mode
) == MODE_INT
6167 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6168 internal_error ("unsupported wide integer operation");
6170 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
6171 if (GET_MODE_CLASS (mode
) == MODE_INT
6172 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6173 internal_error ("unsupported wide integer operation");
6178 /* Return the highest power of two that EXP is known to be a multiple of.
6179 This is used in updating alignment of MEMs in array references. */
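/* Added illustrative note (not in the original source): as a worked
   example, for EXP equal to the constant 24 the lowest set bit gives a
   factor of 8; for "n * 16" (with nothing known about N) the factor is
   16; and for "n * 16 + 8" the PLUS_EXPR case below takes the MIN of
   its operands' factors, giving 8.  The expressions are hypothetical.  */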
6181 static HOST_WIDE_INT
6182 highest_pow2_factor (exp
)
6185 HOST_WIDE_INT c0
, c1
;
6187 switch (TREE_CODE (exp
))
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
        return BIGGEST_ALIGNMENT;
      else
        {
          /* Note: tree_low_cst is intentionally not used here,
             we don't care about the upper bits.  */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;
          return c0 ? c0 : BIGGEST_ALIGNMENT;
        }
6208 case PLUS_EXPR
: case MINUS_EXPR
: case MIN_EXPR
: case MAX_EXPR
:
6209 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6210 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6211 return MIN (c0
, c1
);
6214 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6215 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6218 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6220 if (integer_pow2p (TREE_OPERAND (exp
, 1))
6221 && host_integerp (TREE_OPERAND (exp
, 1), 1))
6223 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6224 c1
= tree_low_cst (TREE_OPERAND (exp
, 1), 1);
6225 return MAX (1, c0
/ c1
);
6229 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6230 case SAVE_EXPR
: case WITH_RECORD_EXPR
:
6231 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6234 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
6237 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6238 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6239 return MIN (c0
, c1
);
6248 /* Similar, except that it is known that the expression must be a multiple
6249 of the alignment of TYPE. */
6251 static HOST_WIDE_INT
6252 highest_pow2_factor_for_type (type
, exp
)
6256 HOST_WIDE_INT type_align
, factor
;
6258 factor
= highest_pow2_factor (exp
);
6259 type_align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
6260 return MAX (factor
, type_align
);
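/* Worked example (editor's addition, not part of the original source):
   for a tree representing  i * 4 + 16,  the PLUS_EXPR case returns
   MIN (highest_pow2_factor (i * 4), highest_pow2_factor (16))
   = MIN (1 * 4, 16) = 4.  The INTEGER_CST case isolates the lowest set
   bit with the usual two's-complement idiom, roughly:  */
#if 0
/* Sketch of the host-integer arithmetic used above; a result of 0 means
   "no low bit known" and is mapped to BIGGEST_ALIGNMENT by the caller.  */
static unsigned long
lowest_set_bit (unsigned long c)
{
  return c & -c;
}
#endif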
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
	       || TREE_CODE (elt) == COND_EXPR)
	      ? TREE_OPERAND (elt, 1)
	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
	      ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
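/* Illustrative example (editor's addition, not part of the original source):
   PLACEHOLDER_EXPR stands for "the object being referenced" in
   self-referential sizes and offsets, as produced for instance by Ada-style
   record types.  While such an object OBJ is being expanded, the
   WITH_RECORD_EXPR handling below pushes OBJ onto placeholder_list, and
   find_placeholder resolves a PLACEHOLDER_EXPR of OBJ's record type (or,
   via an INDIRECT_REF, of a pointer to it) back to OBJ.  */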
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
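/* Editor's illustration (not part of the original source): a caller that
   simply wants the value of EXP in its natural mode passes no target,
   VOIDmode, and modifier 0 (EXPAND_NORMAL):

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   A caller assembling a static initializer passes EXPAND_INITIALIZER and
   must be prepared to get back a symbolic PLUS/MINUS/MULT nest or a MEM
   with a constant address instead of a pseudo register.  */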
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));
6400 /* If we are going to ignore this result, we need only do something
6401 if there is a side-effect somewhere in the expression. If there
6402 is, short-circuit the most common cases here. Note that we must
6403 not call expand_expr with anything but const0_rtx in case this
6404 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6408 if (! TREE_SIDE_EFFECTS (exp
))
6411 /* Ensure we reference a volatile object even if value is ignored, but
6412 don't do this if all we are doing is taking its address. */
6413 if (TREE_THIS_VOLATILE (exp
)
6414 && TREE_CODE (exp
) != FUNCTION_DECL
6415 && mode
!= VOIDmode
&& mode
!= BLKmode
6416 && modifier
!= EXPAND_CONST_ADDRESS
)
6418 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
6419 if (GET_CODE (temp
) == MEM
)
6420 temp
= copy_to_reg (temp
);
6424 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6425 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6426 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6429 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6430 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6432 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6433 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6436 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6437 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
6440 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6442 else if (code
== BIT_FIELD_REF
)
6444 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6445 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6446 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
6453 #ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */
6459 && GET_MODE (target
) != mode
6460 && TREE_CODE (exp
) != INTEGER_CST
6461 && TREE_CODE (exp
) != PARM_DECL
6462 && TREE_CODE (exp
) != ARRAY_REF
6463 && TREE_CODE (exp
) != ARRAY_RANGE_REF
6464 && TREE_CODE (exp
) != COMPONENT_REF
6465 && TREE_CODE (exp
) != BIT_FIELD_REF
6466 && TREE_CODE (exp
) != INDIRECT_REF
6467 && TREE_CODE (exp
) != CALL_EXPR
6468 && TREE_CODE (exp
) != VAR_DECL
6469 && TREE_CODE (exp
) != RTL_EXPR
)
6471 enum machine_mode mode
= GET_MODE (target
);
6473 if (GET_MODE_CLASS (mode
) == MODE_INT
6474 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6475 internal_error ("unsupported wide integer operation");
6479 && TREE_CODE (exp
) != INTEGER_CST
6480 && TREE_CODE (exp
) != PARM_DECL
6481 && TREE_CODE (exp
) != ARRAY_REF
6482 && TREE_CODE (exp
) != ARRAY_RANGE_REF
6483 && TREE_CODE (exp
) != COMPONENT_REF
6484 && TREE_CODE (exp
) != BIT_FIELD_REF
6485 && TREE_CODE (exp
) != INDIRECT_REF
6486 && TREE_CODE (exp
) != VAR_DECL
6487 && TREE_CODE (exp
) != CALL_EXPR
6488 && TREE_CODE (exp
) != RTL_EXPR
6489 && GET_MODE_CLASS (tmode
) == MODE_INT
6490 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
6491 internal_error ("unsupported wide integer operation");
6493 check_max_integer_computation_mode (exp
);
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.  */
6502 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6503 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
)
6504 && ! (code
== CONSTRUCTOR
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
))
6511 tree function
= decl_function_context (exp
);
6512 /* Handle using a label in a containing function. */
6513 if (function
!= current_function_decl
6514 && function
!= inline_function_decl
&& function
!= 0)
6516 struct function
*p
= find_function_data (function
);
6517 p
->expr
->x_forced_labels
6518 = gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (exp
),
6519 p
->expr
->x_forced_labels
);
6523 if (modifier
== EXPAND_INITIALIZER
)
6524 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
,
6529 temp
= gen_rtx_MEM (FUNCTION_MODE
,
6530 gen_rtx_LABEL_REF (Pmode
, label_rtx (exp
)));
6531 if (function
!= current_function_decl
6532 && function
!= inline_function_decl
&& function
!= 0)
6533 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
6538 if (!DECL_RTL_SET_P (exp
))
6540 error_with_decl (exp
, "prior parameter's size depends on `%s'");
6541 return CONST0_RTX (mode
);
6544 /* ... fall through ... */
6547 /* If a static var's type was incomplete when the decl was written,
6548 but the type is complete now, lay out the decl now. */
6549 if (DECL_SIZE (exp
) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
6550 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6552 rtx value
= DECL_RTL_IF_SET (exp
);
6554 layout_decl (exp
, 0);
6556 /* If the RTL was already set, update its mode and memory
6560 PUT_MODE (value
, DECL_MODE (exp
));
6561 SET_DECL_RTL (exp
, 0);
6562 set_mem_attributes (value
, exp
, 1);
6563 SET_DECL_RTL (exp
, value
);
6567 /* ... fall through ... */
6571 if (DECL_RTL (exp
) == 0)
	  /* Ensure variable is marked as used even if it doesn't go through
	     a parser.  If it hasn't been used yet, write out an external
	     definition.  */
6577 if (! TREE_USED (exp
))
6579 assemble_external (exp
);
6580 TREE_USED (exp
) = 1;
6583 /* Show we haven't gotten RTL for this yet. */
6586 /* Handle variables inherited from containing functions. */
6587 context
= decl_function_context (exp
);
6589 /* We treat inline_function_decl as an alias for the current function
6590 because that is the inline function whose vars, types, etc.
6591 are being merged into the current function.
6592 See expand_inline_function. */
6594 if (context
!= 0 && context
!= current_function_decl
6595 && context
!= inline_function_decl
6596 /* If var is static, we don't need a static chain to access it. */
6597 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6598 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6602 /* Mark as non-local and addressable. */
6603 DECL_NONLOCAL (exp
) = 1;
6604 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6606 (*lang_hooks
.mark_addressable
) (exp
);
6607 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6609 addr
= XEXP (DECL_RTL (exp
), 0);
6610 if (GET_CODE (addr
) == MEM
)
6612 = replace_equiv_address (addr
,
6613 fix_lexical_addr (XEXP (addr
, 0), exp
));
6615 addr
= fix_lexical_addr (addr
, exp
);
6617 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6620 /* This is the case of an array whose size is to be determined
6621 from its initializer, while the initializer is still being parsed.
6624 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6625 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6626 temp
= validize_mem (DECL_RTL (exp
));
6628 /* If DECL_RTL is memory, we are in the normal case and either
6629 the address is not valid or it is not a register and -fforce-addr
6630 is specified, get the address into a register. */
6632 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6633 && modifier
!= EXPAND_CONST_ADDRESS
6634 && modifier
!= EXPAND_SUM
6635 && modifier
!= EXPAND_INITIALIZER
6636 && (! memory_address_p (DECL_MODE (exp
),
6637 XEXP (DECL_RTL (exp
), 0))
6639 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6640 temp
= replace_equiv_address (DECL_RTL (exp
),
6641 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6643 /* If we got something, return it. But first, set the alignment
6644 if the address is a register. */
6647 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6648 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6653 /* If the mode of DECL_RTL does not match that of the decl, it
6654 must be a promoted value. We return a SUBREG of the wanted mode,
6655 but mark it so that we know that it was already extended. */
6657 if (GET_CODE (DECL_RTL (exp
)) == REG
6658 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6660 /* Get the signedness used for this variable. Ensure we get the
6661 same mode we got when the variable was declared. */
6662 if (GET_MODE (DECL_RTL (exp
))
6663 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6664 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6667 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6668 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6669 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6673 return DECL_RTL (exp
);
6676 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6677 TREE_INT_CST_HIGH (exp
), mode
);
6679 /* ??? If overflow is set, fold will have done an incomplete job,
6680 which can result in (plus xx (const_int 0)), which can get
6681 simplified by validate_replace_rtx during virtual register
6682 instantiation, which can result in unrecognizable insns.
6683 Avoid this by forcing all overflows into registers. */
6684 if (TREE_CONSTANT_OVERFLOW (exp
)
6685 && modifier
!= EXPAND_INITIALIZER
)
6686 temp
= force_reg (mode
, temp
);
6691 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
6694 /* If optimized, generate immediate CONST_DOUBLE
6695 which will be turned into memory by reload if necessary.
6697 We used to force a register so that loop.c could see it. But
6698 this does not allow gen_* patterns to perform optimizations with
6699 the constants. It also produces two insns in cases like "x = 1.0;".
6700 On most machines, floating-point constants are not permitted in
6701 many insns, so we'd end up copying it to a register in any case.
6703 Now, we do the copying in expand_binop, if appropriate. */
6704 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6705 TYPE_MODE (TREE_TYPE (exp
)));
6709 if (! TREE_CST_RTL (exp
))
6710 output_constant_def (exp
, 1);
6712 /* TREE_CST_RTL probably contains a constant address.
6713 On RISC machines where a constant address isn't valid,
6714 make some insns to get that address into a register. */
6715 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
6716 && modifier
!= EXPAND_CONST_ADDRESS
6717 && modifier
!= EXPAND_INITIALIZER
6718 && modifier
!= EXPAND_SUM
6719 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
6721 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
6722 return replace_equiv_address (TREE_CST_RTL (exp
),
6723 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
6724 return TREE_CST_RTL (exp
);
6726 case EXPR_WITH_FILE_LOCATION
:
6729 const char *saved_input_filename
= input_filename
;
6730 int saved_lineno
= lineno
;
6731 input_filename
= EXPR_WFL_FILENAME (exp
);
6732 lineno
= EXPR_WFL_LINENO (exp
);
6733 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6734 emit_line_note (input_filename
, lineno
);
6735 /* Possibly avoid switching back and forth here. */
6736 to_return
= expand_expr (EXPR_WFL_NODE (exp
), target
, tmode
, modifier
);
6737 input_filename
= saved_input_filename
;
6738 lineno
= saved_lineno
;
6743 context
= decl_function_context (exp
);
6745 /* If this SAVE_EXPR was at global context, assume we are an
6746 initialization function and move it into our context. */
6748 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6750 /* We treat inline_function_decl as an alias for the current function
6751 because that is the inline function whose vars, types, etc.
6752 are being merged into the current function.
6753 See expand_inline_function. */
6754 if (context
== current_function_decl
|| context
== inline_function_decl
)
6757 /* If this is non-local, handle it. */
6760 /* The following call just exists to abort if the context is
6761 not of a containing function. */
6762 find_function_data (context
);
6764 temp
= SAVE_EXPR_RTL (exp
);
6765 if (temp
&& GET_CODE (temp
) == REG
)
6767 put_var_into_stack (exp
);
6768 temp
= SAVE_EXPR_RTL (exp
);
6770 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6773 replace_equiv_address (temp
,
6774 fix_lexical_addr (XEXP (temp
, 0), exp
));
6776 if (SAVE_EXPR_RTL (exp
) == 0)
6778 if (mode
== VOIDmode
)
6781 temp
= assign_temp (build_qualified_type (type
,
6783 | TYPE_QUAL_CONST
)),
6786 SAVE_EXPR_RTL (exp
) = temp
;
6787 if (!optimize
&& GET_CODE (temp
) == REG
)
6788 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6791 /* If the mode of TEMP does not match that of the expression, it
6792 must be a promoted value. We pass store_expr a SUBREG of the
6793 wanted mode but mark it so that we know that it was already
6794 extended. Note that `unsignedp' was modified above in
6797 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6799 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6800 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6801 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6804 if (temp
== const0_rtx
)
6805 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6807 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
6809 TREE_USED (exp
) = 1;
6812 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6813 must be a promoted value. We return a SUBREG of the wanted mode,
6814 but mark it so that we know that it was already extended. */
6816 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6817 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6819 /* Compute the signedness and make the proper SUBREG. */
6820 promote_mode (type
, mode
, &unsignedp
, 0);
6821 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6822 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6823 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6827 return SAVE_EXPR_RTL (exp
);
6832 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6833 TREE_OPERAND (exp
, 0)
6834 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
6838 case PLACEHOLDER_EXPR
:
6840 tree old_list
= placeholder_list
;
6841 tree placeholder_expr
= 0;
6843 exp
= find_placeholder (exp
, &placeholder_expr
);
6847 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6848 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
6849 placeholder_list
= old_list
;
6853 case WITH_RECORD_EXPR
:
6854 /* Put the object on the placeholder list, expand our first operand,
6855 and pop the list. */
6856 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6858 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
6860 placeholder_list
= TREE_CHAIN (placeholder_list
);
6864 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6865 expand_goto (TREE_OPERAND (exp
, 0));
6867 expand_computed_goto (TREE_OPERAND (exp
, 0));
6871 expand_exit_loop_if_false (NULL
,
6872 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6875 case LABELED_BLOCK_EXPR
:
6876 if (LABELED_BLOCK_BODY (exp
))
6877 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6878 /* Should perhaps use expand_label, but this is simpler and safer. */
6879 do_pending_stack_adjust ();
6880 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6883 case EXIT_BLOCK_EXPR
:
6884 if (EXIT_BLOCK_RETURN (exp
))
6885 sorry ("returned value in block_exit_expr");
6886 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6891 expand_start_loop (1);
6892 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
6900 tree vars
= TREE_OPERAND (exp
, 0);
6901 int vars_need_expansion
= 0;
6903 /* Need to open a binding contour here because
6904 if there are any cleanups they must be contained here. */
6905 expand_start_bindings (2);
6907 /* Mark the corresponding BLOCK for output in its proper place. */
6908 if (TREE_OPERAND (exp
, 2) != 0
6909 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6910 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
6912 /* If VARS have not yet been expanded, expand them now. */
6915 if (!DECL_RTL_SET_P (vars
))
6917 vars_need_expansion
= 1;
6920 expand_decl_init (vars
);
6921 vars
= TREE_CHAIN (vars
);
6924 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
6926 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6932 if (RTL_EXPR_SEQUENCE (exp
))
6934 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6936 emit_insn (RTL_EXPR_SEQUENCE (exp
));
6937 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6939 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6940 free_temps_for_rtl_expr (exp
);
6941 return RTL_EXPR_RTL (exp
);
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
6950 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6951 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6956 /* All elts simple constants => refer to a constant in memory. But
6957 if this is a non-BLKmode mode, let it store a field at a time
6958 since that should make a CONST_INT or CONST_DOUBLE when we
6959 fold. Likewise, if we have a target we can use, it is best to
6960 store directly into the target unless the type is large enough
6961 that memcpy will be used. If we are making an initializer and
6962 all operands are constant, put it in memory as well.
6964 FIXME: Avoid trying to fill vector constructors piece-meal.
6965 Output them with output_constant_def below unless we're sure
6966 they're zeros. This should go away when vector initializers
6967 are treated like VECTOR_CST instead of arrays.
6969 else if ((TREE_STATIC (exp
)
6970 && ((mode
== BLKmode
6971 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6972 || TREE_ADDRESSABLE (exp
)
6973 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6974 && (! MOVE_BY_PIECES_P
6975 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6977 && ((TREE_CODE (type
) == VECTOR_TYPE
6978 && !is_zeros_p (exp
))
6979 || ! mostly_zeros_p (exp
)))))
6980 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
6982 rtx constructor
= output_constant_def (exp
, 1);
6984 if (modifier
!= EXPAND_CONST_ADDRESS
6985 && modifier
!= EXPAND_INITIALIZER
6986 && modifier
!= EXPAND_SUM
)
6987 constructor
= validize_mem (constructor
);
6993 /* Handle calls that pass values in multiple non-contiguous
6994 locations. The Irix 6 ABI has examples of this. */
6995 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6996 || GET_CODE (target
) == PARALLEL
)
6998 = assign_temp (build_qualified_type (type
,
7000 | (TREE_READONLY (exp
)
7001 * TYPE_QUAL_CONST
))),
7002 0, TREE_ADDRESSABLE (exp
), 1);
7004 store_constructor (exp
, target
, 0, int_expr_size (exp
));
7010 tree exp1
= TREE_OPERAND (exp
, 0);
7012 tree string
= string_constant (exp1
, &index
);
7014 /* Try to optimize reads from const strings. */
7016 && TREE_CODE (string
) == STRING_CST
7017 && TREE_CODE (index
) == INTEGER_CST
7018 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
7019 && GET_MODE_CLASS (mode
) == MODE_INT
7020 && GET_MODE_SIZE (mode
) == 1
7021 && modifier
!= EXPAND_WRITE
)
7022 return gen_int_mode (TREE_STRING_POINTER (string
)
7023 [TREE_INT_CST_LOW (index
)], mode
);
7025 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
7026 op0
= memory_address (mode
, op0
);
7027 temp
= gen_rtx_MEM (mode
, op0
);
7028 set_mem_attributes (temp
, exp
, 0);
7030 /* If we are writing to this object and its type is a record with
7031 readonly fields, we must mark it as readonly so it will
7032 conflict with readonly references to those fields. */
7033 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
7034 RTX_UNCHANGING_P (temp
) = 1;
7040 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
7044 tree array
= TREE_OPERAND (exp
, 0);
7045 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
7046 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
7047 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
7050 /* Optimize the special-case of a zero lower bound.
7052 We convert the low_bound to sizetype to avoid some problems
7053 with constant folding. (E.g. suppose the lower bound is 1,
7054 and its mode is QI. Without the conversion, (ARRAY
7055 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7056 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7058 if (! integer_zerop (low_bound
))
7059 index
= size_diffop (index
, convert (sizetype
, low_bound
));
7061 /* Fold an expression like: "foo"[2].
7062 This is not done in fold so it won't happen inside &.
7063 Don't fold if this is for wide characters since it's too
7064 difficult to do correctly and this is a very rare case. */
7066 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
7067 && TREE_CODE (array
) == STRING_CST
7068 && TREE_CODE (index
) == INTEGER_CST
7069 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
7070 && GET_MODE_CLASS (mode
) == MODE_INT
7071 && GET_MODE_SIZE (mode
) == 1)
7072 return gen_int_mode (TREE_STRING_POINTER (array
)
7073 [TREE_INT_CST_LOW (index
)], mode
);
7075 /* If this is a constant index into a constant array,
7076 just get the value from the array. Handle both the cases when
7077 we have an explicit constructor and when our operand is a variable
7078 that was declared const. */
7080 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
7081 && TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
7082 && TREE_CODE (index
) == INTEGER_CST
7083 && 0 > compare_tree_int (index
,
7084 list_length (CONSTRUCTOR_ELTS
7085 (TREE_OPERAND (exp
, 0)))))
7089 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
7090 i
= TREE_INT_CST_LOW (index
);
7091 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
7095 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
7099 else if (optimize
>= 1
7100 && modifier
!= EXPAND_CONST_ADDRESS
7101 && modifier
!= EXPAND_INITIALIZER
7102 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
7103 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
7104 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
7106 if (TREE_CODE (index
) == INTEGER_CST
)
7108 tree init
= DECL_INITIAL (array
);
7110 if (TREE_CODE (init
) == CONSTRUCTOR
)
7114 for (elem
= CONSTRUCTOR_ELTS (init
);
7116 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
7117 elem
= TREE_CHAIN (elem
))
7120 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
7121 return expand_expr (fold (TREE_VALUE (elem
)), target
,
7124 else if (TREE_CODE (init
) == STRING_CST
7125 && 0 > compare_tree_int (index
,
7126 TREE_STRING_LENGTH (init
)))
7128 tree type
= TREE_TYPE (TREE_TYPE (init
));
7129 enum machine_mode mode
= TYPE_MODE (type
);
7131 if (GET_MODE_CLASS (mode
) == MODE_INT
7132 && GET_MODE_SIZE (mode
) == 1)
7133 return gen_int_mode (TREE_STRING_POINTER (init
)
7134 [TREE_INT_CST_LOW (index
)], mode
);
7143 case ARRAY_RANGE_REF
:
7144 /* If the operand is a CONSTRUCTOR, we can just extract the
7145 appropriate field if it is present. Don't do this if we have
7146 already written the data since we want to refer to that copy
7147 and varasm.c assumes that's what we'll do. */
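	  /* Editor's illustration (not part of the original source): for a
	     COMPONENT_REF of a CONSTRUCTOR that has not been written out,
	     such as  ((struct s) {1, 2}).b,  the loop below finds the
	     element whose TREE_PURPOSE is the FIELD_DECL for `b' and
	     expands its TREE_VALUE (here the constant 2) directly, with no
	     temporary object in memory.  */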
7148 if (code
== COMPONENT_REF
7149 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
7150 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
7154 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
7155 elt
= TREE_CHAIN (elt
))
7156 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
7157 /* We can normally use the value of the field in the
7158 CONSTRUCTOR. However, if this is a bitfield in
7159 an integral mode that we can fit in a HOST_WIDE_INT,
7160 we must mask only the number of bits in the bitfield,
7161 since this is done implicitly by the constructor. If
7162 the bitfield does not meet either of those conditions,
7163 we can't do this optimization. */
7164 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7165 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
7167 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
7168 <= HOST_BITS_PER_WIDE_INT
))))
7170 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
7171 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
7173 HOST_WIDE_INT bitsize
7174 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
7175 enum machine_mode imode
7176 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
7178 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
7180 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7181 op0
= expand_and (imode
, op0
, op1
, target
);
7186 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
7189 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7191 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7201 enum machine_mode mode1
;
7202 HOST_WIDE_INT bitsize
, bitpos
;
7205 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7206 &mode1
, &unsignedp
, &volatilep
);
7209 /* If we got back the original object, something is wrong. Perhaps
7210 we are evaluating an expression too early. In any event, don't
7211 infinitely recurse. */
7215 /* If TEM's type is a union of variable size, pass TARGET to the inner
7216 computation, since it will need a temporary and TARGET is known
7217 to have to do. This occurs in unchecked conversion in Ada. */
7221 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7222 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7224 ? target
: NULL_RTX
),
7226 (modifier
== EXPAND_INITIALIZER
7227 || modifier
== EXPAND_CONST_ADDRESS
)
7228 ? modifier
: EXPAND_NORMAL
);
7230 /* If this is a constant, put it into a register if it is a
7231 legitimate constant and OFFSET is 0 and memory if it isn't. */
7232 if (CONSTANT_P (op0
))
7234 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7235 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7237 op0
= force_reg (mode
, op0
);
7239 op0
= validize_mem (force_const_mem (mode
, op0
));
7244 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
7246 /* If this object is in a register, put it into memory.
7247 This case can't occur in C, but can in Ada if we have
7248 unchecked conversion of an expression from a scalar type to
7249 an array or record type. */
7250 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7251 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
7253 /* If the operand is a SAVE_EXPR, we can deal with this by
7254 forcing the SAVE_EXPR into memory. */
7255 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7257 put_var_into_stack (TREE_OPERAND (exp
, 0));
7258 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7263 = build_qualified_type (TREE_TYPE (tem
),
7264 (TYPE_QUALS (TREE_TYPE (tem
))
7265 | TYPE_QUAL_CONST
));
7266 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7268 emit_move_insn (memloc
, op0
);
7273 if (GET_CODE (op0
) != MEM
)
7276 #ifdef POINTERS_EXTEND_UNSIGNED
7277 if (GET_MODE (offset_rtx
) != Pmode
)
7278 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
7280 if (GET_MODE (offset_rtx
) != ptr_mode
)
7281 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7284 /* A constant address in OP0 can have VOIDmode, we must not try
7285 to call force_reg for that case. Avoid that case. */
7286 if (GET_CODE (op0
) == MEM
7287 && GET_MODE (op0
) == BLKmode
7288 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7290 && (bitpos
% bitsize
) == 0
7291 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7292 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7294 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7298 op0
= offset_address (op0
, offset_rtx
,
7299 highest_pow2_factor (offset
));
7302 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7303 record its alignment as BIGGEST_ALIGNMENT. */
7304 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7305 && is_aligning_offset (offset
, tem
))
7306 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7308 /* Don't forget about volatility even if this is a bitfield. */
7309 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7311 if (op0
== orig_op0
)
7312 op0
= copy_rtx (op0
);
7314 MEM_VOLATILE_P (op0
) = 1;
      /* The following code doesn't handle CONCAT.
	 Assume only bitpos == 0 can be used for CONCAT, due to
	 one-element arrays having the same mode as their element.  */
7320 if (GET_CODE (op0
) == CONCAT
)
7322 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
7327 /* In cases where an aligned union has an unaligned object
7328 as a field, we might be extracting a BLKmode value from
7329 an integer-mode (e.g., SImode) object. Handle this case
7330 by doing the extract into an object as wide as the field
7331 (which we know to be the width of a basic mode), then
7332 storing into memory, and changing the mode to BLKmode. */
7333 if (mode1
== VOIDmode
7334 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7335 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7336 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7337 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7338 && modifier
!= EXPAND_CONST_ADDRESS
7339 && modifier
!= EXPAND_INITIALIZER
)
7340 /* If the field isn't aligned enough to fetch as a memref,
7341 fetch it as a bit field. */
7342 || (mode1
!= BLKmode
7343 && SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))
7344 && ((TYPE_ALIGN (TREE_TYPE (tem
))
7345 < GET_MODE_ALIGNMENT (mode
))
7346 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
7347 /* If the type and the field are a constant size and the
7348 size of the type isn't the same size as the bitfield,
7349 we must use bitfield operations. */
7351 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7353 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7356 enum machine_mode ext_mode
= mode
;
7358 if (ext_mode
== BLKmode
7359 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7360 && GET_CODE (target
) == MEM
7361 && bitpos
% BITS_PER_UNIT
== 0))
7362 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7364 if (ext_mode
== BLKmode
)
7366 /* In this case, BITPOS must start at a byte boundary and
7367 TARGET, if specified, must be a MEM. */
7368 if (GET_CODE (op0
) != MEM
7369 || (target
!= 0 && GET_CODE (target
) != MEM
)
7370 || bitpos
% BITS_PER_UNIT
!= 0)
7373 op0
= adjust_address (op0
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
7375 target
= assign_temp (type
, 0, 1, 1);
7377 emit_block_move (target
, op0
,
7378 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7385 op0
= validize_mem (op0
);
7387 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7388 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7390 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
7391 unsignedp
, target
, ext_mode
, ext_mode
,
7392 int_size_in_bytes (TREE_TYPE (tem
)));
7394 /* If the result is a record type and BITSIZE is narrower than
7395 the mode of OP0, an integral mode, and this is a big endian
7396 machine, we must put the field into the high-order bits. */
7397 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7398 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7399 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7400 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7401 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7405 if (mode
== BLKmode
)
7407 rtx
new = assign_temp (build_qualified_type
7408 ((*lang_hooks
.types
.type_for_mode
)
7410 TYPE_QUAL_CONST
), 0, 1, 1);
7412 emit_move_insn (new, op0
);
7413 op0
= copy_rtx (new);
7414 PUT_MODE (op0
, BLKmode
);
7415 set_mem_attributes (op0
, exp
, 1);
7421 /* If the result is BLKmode, use that to access the object
7423 if (mode
== BLKmode
)
7426 /* Get a reference to just this component. */
7427 if (modifier
== EXPAND_CONST_ADDRESS
7428 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7429 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7431 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7433 if (op0
== orig_op0
)
7434 op0
= copy_rtx (op0
);
7436 set_mem_attributes (op0
, exp
, 0);
7437 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7438 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7440 MEM_VOLATILE_P (op0
) |= volatilep
;
7441 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7442 || modifier
== EXPAND_CONST_ADDRESS
7443 || modifier
== EXPAND_INITIALIZER
)
7445 else if (target
== 0)
7446 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7448 convert_move (target
, op0
, unsignedp
);
7454 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7456 /* Evaluate the interior expression. */
7457 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7460 /* Get or create an instruction off which to hang a note. */
7461 if (REG_P (subtarget
))
7464 insn
= get_last_insn ();
7467 if (! INSN_P (insn
))
7468 insn
= prev_nonnote_insn (insn
);
7472 target
= gen_reg_rtx (GET_MODE (subtarget
));
7473 insn
= emit_move_insn (target
, subtarget
);
7476 /* Collect the data for the note. */
7477 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7478 vtbl_ref
= plus_constant (vtbl_ref
,
7479 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7480 /* Discard the initial CONST that was added. */
7481 vtbl_ref
= XEXP (vtbl_ref
, 0);
7484 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7489 /* Intended for a reference to a buffer of a file-object in Pascal.
7490 But it's not certain that a special tree code will really be
7491 necessary for these. INDIRECT_REF might work for them. */
7497 /* Pascal set IN expression.
7500 rlo = set_low - (set_low%bits_per_word);
7501 the_word = set [ (index - rlo)/bits_per_word ];
7502 bit_index = index % bits_per_word;
7503 bitmask = 1 << bit_index;
7504 return !!(the_word & bitmask); */
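	/* Editor's sketch (not part of the original source): the pseudocode
	   above written out in plain C for a byte-addressed set, i.e. with
	   bits_per_word == BITS_PER_UNIT == 8, assuming INDEX is already
	   known to be in range:  */
#if 0
	static int
	set_contains (const unsigned char *set, long set_low, long index)
	{
	  long rlo = set_low - (set_low % 8);
	  unsigned char the_word = set[(index - rlo) / 8];
	  int bit_index = index % 8;
	  return (the_word >> bit_index) & 1;
	}
#endif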
7506 tree set
= TREE_OPERAND (exp
, 0);
7507 tree index
= TREE_OPERAND (exp
, 1);
7508 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7509 tree set_type
= TREE_TYPE (set
);
7510 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7511 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7512 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7513 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7514 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7515 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7516 rtx setaddr
= XEXP (setval
, 0);
7517 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7519 rtx diff
, quo
, rem
, addr
, bit
, result
;
7521 /* If domain is empty, answer is no. Likewise if index is constant
7522 and out of bounds. */
7523 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7524 && TREE_CODE (set_low_bound
) == INTEGER_CST
7525 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7526 || (TREE_CODE (index
) == INTEGER_CST
7527 && TREE_CODE (set_low_bound
) == INTEGER_CST
7528 && tree_int_cst_lt (index
, set_low_bound
))
7529 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7530 && TREE_CODE (index
) == INTEGER_CST
7531 && tree_int_cst_lt (set_high_bound
, index
))))
7535 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7537 /* If we get here, we have to generate the code for both cases
7538 (in range and out of range). */
7540 op0
= gen_label_rtx ();
7541 op1
= gen_label_rtx ();
7543 if (! (GET_CODE (index_val
) == CONST_INT
7544 && GET_CODE (lo_r
) == CONST_INT
))
7545 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7546 GET_MODE (index_val
), iunsignedp
, op1
);
7548 if (! (GET_CODE (index_val
) == CONST_INT
7549 && GET_CODE (hi_r
) == CONST_INT
))
7550 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7551 GET_MODE (index_val
), iunsignedp
, op1
);
7553 /* Calculate the element number of bit zero in the first word
7555 if (GET_CODE (lo_r
) == CONST_INT
)
7556 rlow
= GEN_INT (INTVAL (lo_r
)
7557 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7559 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7560 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7561 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7563 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7564 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7566 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7567 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7568 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7569 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7571 addr
= memory_address (byte_mode
,
7572 expand_binop (index_mode
, add_optab
, diff
,
7573 setaddr
, NULL_RTX
, iunsignedp
,
7576 /* Extract the bit we want to examine. */
7577 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7578 gen_rtx_MEM (byte_mode
, addr
),
7579 make_tree (TREE_TYPE (index
), rem
),
7581 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7582 GET_MODE (target
) == byte_mode
? target
: 0,
7583 1, OPTAB_LIB_WIDEN
);
7585 if (result
!= target
)
7586 convert_move (target
, result
, 1);
7588 /* Output the code to handle the out-of-range case. */
7591 emit_move_insn (target
, const0_rtx
);
7596 case WITH_CLEANUP_EXPR
:
7597 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7599 WITH_CLEANUP_EXPR_RTL (exp
)
7600 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7601 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7602 CLEANUP_EH_ONLY (exp
));
7604 /* That's it for this cleanup. */
7605 TREE_OPERAND (exp
, 1) = 0;
7607 return WITH_CLEANUP_EXPR_RTL (exp
);
7609 case CLEANUP_POINT_EXPR
:
7611 /* Start a new binding layer that will keep track of all cleanup
7612 actions to be performed. */
7613 expand_start_bindings (2);
7615 target_temp_slot_level
= temp_slot_level
;
7617 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7618 /* If we're going to use this value, load it up now. */
7620 op0
= force_not_mem (op0
);
7621 preserve_temp_slots (op0
);
7622 expand_end_bindings (NULL_TREE
, 0, 0);
7627 /* Check for a built-in function. */
7628 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7629 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7631 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7633 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7634 == BUILT_IN_FRONTEND
)
7635 return (*lang_hooks
.expand_expr
)
7636 (exp
, original_target
, tmode
, modifier
);
7638 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7641 return expand_call (exp
, target
, ignore
);
7643 case NON_LVALUE_EXPR
:
7646 case REFERENCE_EXPR
:
7647 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7650 if (TREE_CODE (type
) == UNION_TYPE
)
7652 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7654 /* If both input and output are BLKmode, this conversion isn't doing
7655 anything except possibly changing memory attribute. */
7656 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7658 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7661 result
= copy_rtx (result
);
7662 set_mem_attributes (result
, exp
, 0);
7667 target
= assign_temp (type
, 0, 1, 1);
7669 if (GET_CODE (target
) == MEM
)
7670 /* Store data into beginning of memory target. */
7671 store_expr (TREE_OPERAND (exp
, 0),
7672 adjust_address (target
, TYPE_MODE (valtype
), 0), 0);
7674 else if (GET_CODE (target
) == REG
)
7675 /* Store this field into a union of the proper type. */
7676 store_field (target
,
7677 MIN ((int_size_in_bytes (TREE_TYPE
7678 (TREE_OPERAND (exp
, 0)))
7680 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7681 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7682 VOIDmode
, 0, type
, 0);
7686 /* Return the entire union. */
7690 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7692 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7695 /* If the signedness of the conversion differs and OP0 is
7696 a promoted SUBREG, clear that indication since we now
7697 have to do the proper extension. */
7698 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7699 && GET_CODE (op0
) == SUBREG
)
7700 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7705 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7706 if (GET_MODE (op0
) == mode
)
7709 /* If OP0 is a constant, just convert it into the proper mode. */
7710 if (CONSTANT_P (op0
))
7712 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7713 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7715 if (modifier
== EXPAND_INITIALIZER
)
7716 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7717 subreg_lowpart_offset (mode
,
7720 return convert_modes (mode
, inner_mode
, op0
,
7721 TREE_UNSIGNED (inner_type
));
7724 if (modifier
== EXPAND_INITIALIZER
)
7725 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7729 convert_to_mode (mode
, op0
,
7730 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7732 convert_move (target
, op0
,
7733 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7736 case VIEW_CONVERT_EXPR
:
7737 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7739 /* If the input and output modes are both the same, we are done.
7740 Otherwise, if neither mode is BLKmode and both are within a word, we
7741 can use gen_lowpart. If neither is true, make sure the operand is
7742 in memory and convert the MEM to the new mode. */
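      /* Editor's illustration (not part of the original source): viewing a
	 `float' as a same-sized `int' (SFmode and SImode, both within a
	 word) takes the gen_lowpart path below, while viewing a BLKmode
	 struct as an integer forces the value into memory and re-reads the
	 MEM in the new mode.  */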
7743 if (TYPE_MODE (type
) == GET_MODE (op0
))
7745 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7746 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7747 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7748 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7749 else if (GET_CODE (op0
) != MEM
)
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
7755 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7757 if (TREE_ADDRESSABLE (exp
))
7760 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7762 = assign_stack_temp_for_type
7763 (TYPE_MODE (inner_type
),
7764 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7766 emit_move_insn (target
, op0
);
7770 /* At this point, OP0 is in the correct mode. If the output type is such
7771 that the operand is known to be aligned, indicate that it is.
7772 Otherwise, we need only be concerned about alignment for non-BLKmode
7774 if (GET_CODE (op0
) == MEM
)
7776 op0
= copy_rtx (op0
);
7778 if (TYPE_ALIGN_OK (type
))
7779 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7780 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7781 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7783 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7784 HOST_WIDE_INT temp_size
7785 = MAX (int_size_in_bytes (inner_type
),
7786 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7787 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7788 temp_size
, 0, type
);
7789 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7791 if (TREE_ADDRESSABLE (exp
))
7794 if (GET_MODE (op0
) == BLKmode
)
7795 emit_block_move (new_with_op0_mode
, op0
,
7796 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7799 emit_move_insn (new_with_op0_mode
, op0
);
7804 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7810 this_optab
= ! unsignedp
&& flag_trapv
7811 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7812 ? addv_optab
: add_optab
;
7814 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7815 something else, make sure we add the register to the constant and
7816 then to the other thing. This case can occur during strength
7817 reduction and doing it this way will produce better code if the
7818 frame pointer or argument pointer is eliminated.
7820 fold-const.c will ensure that the constant is always in the inner
7821 PLUS_EXPR, so the only case we need to do anything about is if
7822 sp, ap, or fp is our second argument, in which case we must swap
7823 the innermost first argument and our second argument. */
7825 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7826 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7827 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7828 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7829 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7830 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7832 tree t
= TREE_OPERAND (exp
, 1);
7834 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7835 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7838 /* If the result is to be ptr_mode and we are adding an integer to
7839 something, we might be forming a constant. So try to use
7840 plus_constant. If it produces a sum and we can't accept it,
7841 use force_operand. This allows P = &ARR[const] to generate
7842 efficient code on machines where a SYMBOL_REF is not a valid
7845 If this is an EXPAND_SUM call, always return the sum. */
7846 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7847 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7849 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7850 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7851 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7855 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7857 /* Use immed_double_const to ensure that the constant is
7858 truncated according to the mode of OP1, then sign extended
7859 to a HOST_WIDE_INT. Using the constant directly can result
7860 in non-canonical RTL in a 64x32 cross compile. */
7862 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7864 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7865 op1
= plus_constant (op1
, INTVAL (constant_part
));
7866 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7867 op1
= force_operand (op1
, target
);
7871 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7872 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7873 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7877 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7878 (modifier
== EXPAND_INITIALIZER
7879 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7880 if (! CONSTANT_P (op0
))
7882 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7883 VOIDmode
, modifier
);
7884 /* Don't go to both_summands if modifier
7885 says it's not right to return a PLUS. */
7886 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7890 /* Use immed_double_const to ensure that the constant is
7891 truncated according to the mode of OP1, then sign extended
7892 to a HOST_WIDE_INT. Using the constant directly can result
7893 in non-canonical RTL in a 64x32 cross compile. */
7895 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7897 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7898 op0
= plus_constant (op0
, INTVAL (constant_part
));
7899 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7900 op0
= force_operand (op0
, target
);
7905 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
7912 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7913 || mode
!= ptr_mode
)
7915 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7916 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7917 if (op0
== const0_rtx
)
7919 if (op1
== const0_rtx
)
7924 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
7925 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
    /* We come here from MINUS_EXPR when the second operand is a
       constant.  */
7930 /* Make sure any term that's a sum with a constant comes last. */
7931 if (GET_CODE (op0
) == PLUS
7932 && CONSTANT_P (XEXP (op0
, 1)))
7938 /* If adding to a sum including a constant,
7939 associate it to put the constant outside. */
7940 if (GET_CODE (op1
) == PLUS
7941 && CONSTANT_P (XEXP (op1
, 1)))
7943 rtx constant_term
= const0_rtx
;
7945 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7948 /* Ensure that MULT comes first if there is one. */
7949 else if (GET_CODE (op0
) == MULT
)
7950 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7952 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7954 /* Let's also eliminate constants from op0 if possible. */
7955 op0
= eliminate_constant_term (op0
, &constant_term
);
7957 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7958 their sum should be a constant. Form it into OP1, since the
7959 result we want will then be OP0 + OP1. */
7961 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7966 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7969 /* Put a constant term last and put a multiplication first. */
7970 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7971 temp
= op1
, op1
= op0
, op0
= temp
;
7973 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7974 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
7977 /* For initializers, we are allowed to return a MINUS of two
7978 symbolic constants. Here we handle all cases when both operands
7980 /* Handle difference of two symbolic constants,
7981 for the sake of an initializer. */
7982 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7983 && really_constant_p (TREE_OPERAND (exp
, 0))
7984 && really_constant_p (TREE_OPERAND (exp
, 1)))
7986 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
,
7988 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
7991 /* If the last operand is a CONST_INT, use plus_constant of
7992 the negated constant. Else make the MINUS. */
7993 if (GET_CODE (op1
) == CONST_INT
)
7994 return plus_constant (op0
, - INTVAL (op1
));
7996 return gen_rtx_MINUS (mode
, op0
, op1
);
7999 this_optab
= ! unsignedp
&& flag_trapv
8000 && (GET_MODE_CLASS(mode
) == MODE_INT
)
8001 ? subv_optab
: sub_optab
;
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8007 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8008 || mode
!= ptr_mode
)
8011 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8014 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
8015 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
8017 /* Convert A - const to A + (-const). */
8018 if (GET_CODE (op1
) == CONST_INT
)
8020 op1
= negate_rtx (mode
, op1
);
8027 /* If first operand is constant, swap them.
8028 Thus the following special case checks need only
8029 check the second operand. */
8030 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
8032 tree t1
= TREE_OPERAND (exp
, 0);
8033 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
8034 TREE_OPERAND (exp
, 1) = t1
;
8037 /* Attempt to return something suitable for generating an
8038 indexed address, for machines that support that. */
8040 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8041 && host_integerp (TREE_OPERAND (exp
, 1), 0))
8043 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
8046 /* If we knew for certain that this is arithmetic for an array
8047 reference, and we knew the bounds of the array, then we could
8048 apply the distributive law across (PLUS X C) for constant C.
8049 Without such knowledge, we risk overflowing the computation
8050 when both X and C are large, but X+C isn't. */
8051 /* ??? Could perhaps special-case EXP being unsigned and C being
8052 positive. In that case we are certain that X+C is no smaller
8053 than X and so the transformed expression will overflow iff the
8054 original would have. */
          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return
            gen_rtx_MULT (mode, op0,
                          GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
        }
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
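      /* For instance, on a target with a mulhisi3 pattern, a source
         expression such as

             short a, b;  int p = a * b;

         arrives here as (int) a * (int) b; a HImode widening multiply
         avoids the two explicit extensions and the SImode multiply.
         (Example sketch; the insn actually used depends on the optab
         entries checked below.)  */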
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   == TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                               ? smul_widen_optab : umul_widen_optab);
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  goto binop2;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem;
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_expr (TREE_OPERAND (exp, 1),
                                                      NULL_RTX, VOIDmode, 0),
                                         unsignedp);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  htem = expand_mult_highpart_adjust (innermode,
                                                      gen_highpart (innermode, temp),
                                                      op0, op1,
                                                      gen_highpart (innermode, temp),
                                                      unsignedp);
                  emit_move_insn (gen_highpart (innermode, temp), htem);
                  return temp;
                }
            }
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
         expensive divide.  If not, combine will rebuild the original
         computation.  */
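      /* E.g. a loop computing x[i]/d for a loop-invariant d is rewritten as
         x[i] * (1.0/d); CSE can then hoist the single reciprocal so only one
         divide remains.  (Sketch of the intent; this is only done under
         -funsafe-math-optimizations because it changes rounding.)  */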
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
          && TREE_CODE (type) == REAL_TYPE
          && !real_onep (TREE_OPERAND (exp, 0)))
        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
                                   build (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          TREE_OPERAND (exp, 1))),
                            target, tmode, unsignedp);
      this_optab = sdiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
                          ! unsignedp && flag_trapv
                          && (GET_MODE_CLASS (mode) == MODE_INT)
                          ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;
      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (GET_CODE (target) == MEM)
        target = gen_reg_rtx (mode);

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL_RTX, op0);
        }
      else
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
          do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
                                   unsignedp, mode, NULL_RTX, NULL_RTX,
                                   op0);
        }

      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */
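      /* Concretely, for `a && b' represented as TRUTH_AND_EXPR (both
         operands wanted), the expansion below computes each operand as a
         0/1 value and emits a bitwise AND, roughly

             t1 = (a != 0);  t2 = (b != 0);  result = t1 & t2;

         whereas TRUTH_ANDIF_EXPR would branch around the evaluation of b.
         (Rough sketch of the difference described above.)  */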
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case UNORDERED_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          /* If temp is constant, we can just compute the result.  */
          if (GET_CODE (temp) == CONST_INT)
            {
              if (INTVAL (temp) != 0)
                emit_move_insn (target, const1_rtx);
              else
                emit_move_insn (target, const0_rtx);

              return target;
            }

          if (temp != original_target)
            {
              enum machine_mode mode1 = GET_MODE (temp);
              if (mode1 == VOIDmode)
                mode1 = tmode != VOIDmode ? tmode : mode;

              temp = copy_to_mode_reg (mode1, temp);
            }

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0 || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && GET_CODE (target) == REG
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, 0);
8446 /* If we would have a "singleton" (see below) were it not for a
8447 conversion in each arm, bring that conversion back out. */
8448 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8449 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8450 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8451 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8453 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8454 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8456 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8457 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8458 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8459 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8460 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8461 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8462 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8463 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8464 return expand_expr (build1 (NOP_EXPR
, type
,
8465 build (COND_EXPR
, TREE_TYPE (iftrue
),
8466 TREE_OPERAND (exp
, 0),
8468 target
, tmode
, modifier
);
8472 /* Note that COND_EXPRs whose type is a structure or union
8473 are required to be constructed to contain assignments of
8474 a temporary variable, so that we can evaluate them here
8475 for side effect only. If type is void, we must do likewise. */
8477 /* If an arm of the branch requires a cleanup,
8478 only that cleanup is performed. */
8481 tree binary_op
= 0, unary_op
= 0;
8483 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8484 convert it to our mode, if necessary. */
8485 if (integer_onep (TREE_OPERAND (exp
, 1))
8486 && integer_zerop (TREE_OPERAND (exp
, 2))
8487 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8491 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8496 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8497 if (GET_MODE (op0
) == mode
)
8501 target
= gen_reg_rtx (mode
);
8502 convert_move (target
, op0
, unsignedp
);
        /* Check for X ? A + B : A.  If we have this, we can copy A to the
           output and conditionally add B.  Similarly for unary operations.
           Don't do this if X has side-effects because those side effects
           might affect A or B and the "?" operation is a sequence point in
           ANSI.  (operand_equal_p tests for side effects.)  */
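        /* For instance, `x ? a + b : a' can be expanded by storing A into
           the result unconditionally and adding B only on the branch where
           X is true, instead of evaluating A twice; the checks below look
           for that shape (SINGLETON is the shared A).  */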
8512 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8513 && operand_equal_p (TREE_OPERAND (exp
, 2),
8514 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8515 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8516 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8517 && operand_equal_p (TREE_OPERAND (exp
, 1),
8518 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8519 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8520 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8521 && operand_equal_p (TREE_OPERAND (exp
, 2),
8522 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8523 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8524 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8525 && operand_equal_p (TREE_OPERAND (exp
, 1),
8526 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8527 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8529 /* If we are not to produce a result, we have no target. Otherwise,
8530 if a target was specified use it; it will not be used as an
8531 intermediate target unless it is safe. If no target, use a
8536 else if (original_target
8537 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8538 || (singleton
&& GET_CODE (original_target
) == REG
8539 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8540 && original_target
== var_rtx (singleton
)))
8541 && GET_MODE (original_target
) == mode
8542 #ifdef HAVE_conditional_move
8543 && (! can_conditionally_move_p (mode
)
8544 || GET_CODE (original_target
) == REG
8545 || TREE_ADDRESSABLE (type
))
8547 && (GET_CODE (original_target
) != MEM
8548 || TREE_ADDRESSABLE (type
)))
8549 temp
= original_target
;
8550 else if (TREE_ADDRESSABLE (type
))
8553 temp
= assign_temp (type
, 0, 0, 1);
        /* If we had X ? A + C : A, with C a constant power of 2, and we can
           do the test of X as a store-flag operation, do this as
           A + ((X != 0) << log C).  Similarly for other simple binary
           operators.  Only do for C == 1 if BRANCH_COST is low.  */
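        /* E.g. `x ? a + 4 : a' (C == 4 == 1 << 2) can become
           a + ((x != 0) << 2), replacing the conditional branch with a
           store-flag and a shift whenever do_store_flag can handle X.
           (Sketch of the transformation described above.)  */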
8559 if (temp
&& singleton
&& binary_op
8560 && (TREE_CODE (binary_op
) == PLUS_EXPR
8561 || TREE_CODE (binary_op
) == MINUS_EXPR
8562 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8563 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8564 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8565 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8566 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8569 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8570 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8571 ? addv_optab
: add_optab
)
8572 : TREE_CODE (binary_op
) == MINUS_EXPR
8573 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8574 ? subv_optab
: sub_optab
)
8575 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8578 /* If we had X ? A : A + 1, do this as A + (X == 0).
8580 We have to invert the truth value here and then put it
8581 back later if do_store_flag fails. We cannot simply copy
8582 TREE_OPERAND (exp, 0) to another variable and modify that
8583 because invert_truthvalue can modify the tree pointed to
8585 if (singleton
== TREE_OPERAND (exp
, 1))
8586 TREE_OPERAND (exp
, 0)
8587 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8589 result
= do_store_flag (TREE_OPERAND (exp
, 0),
8590 (safe_from_p (temp
, singleton
, 1)
8592 mode
, BRANCH_COST
<= 1);
8594 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8595 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8596 build_int_2 (tree_log2
8600 (safe_from_p (temp
, singleton
, 1)
8601 ? temp
: NULL_RTX
), 0);
8605 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8606 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8607 unsignedp
, OPTAB_LIB_WIDEN
);
8609 else if (singleton
== TREE_OPERAND (exp
, 1))
8610 TREE_OPERAND (exp
, 0)
8611 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8614 do_pending_stack_adjust ();
8616 op0
= gen_label_rtx ();
8618 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8622 /* If the target conflicts with the other operand of the
8623 binary op, we can't use it. Also, we can't use the target
8624 if it is a hard register, because evaluating the condition
8625 might clobber it. */
8627 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8628 || (GET_CODE (temp
) == REG
8629 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8630 temp
= gen_reg_rtx (mode
);
8631 store_expr (singleton
, temp
, 0);
8634 expand_expr (singleton
,
8635 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8636 if (singleton
== TREE_OPERAND (exp
, 1))
8637 jumpif (TREE_OPERAND (exp
, 0), op0
);
8639 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8641 start_cleanup_deferral ();
8642 if (binary_op
&& temp
== 0)
8643 /* Just touch the other operand. */
8644 expand_expr (TREE_OPERAND (binary_op
, 1),
8645 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8647 store_expr (build (TREE_CODE (binary_op
), type
,
8648 make_tree (type
, temp
),
8649 TREE_OPERAND (binary_op
, 1)),
8652 store_expr (build1 (TREE_CODE (unary_op
), type
,
8653 make_tree (type
, temp
)),
        /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
           comparison operator.  If we have one of these cases, set the
           output to A, branch on A (cse will merge these two references),
           then set the output to FOO.  */
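        /* For example, `a > 0 ? a : 0' fits this pattern: copy A into the
           result, branch on the comparison (cse merges the two uses of A),
           and store FOO on the other arm.  */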
8662 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8663 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8664 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8665 TREE_OPERAND (exp
, 1), 0)
8666 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8667 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8668 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8670 if (GET_CODE (temp
) == REG
8671 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8672 temp
= gen_reg_rtx (mode
);
8673 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8674 jumpif (TREE_OPERAND (exp
, 0), op0
);
8676 start_cleanup_deferral ();
8677 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8681 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8682 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8683 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8684 TREE_OPERAND (exp
, 2), 0)
8685 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8686 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8687 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8689 if (GET_CODE (temp
) == REG
8690 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8691 temp
= gen_reg_rtx (mode
);
8692 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8693 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8695 start_cleanup_deferral ();
8696 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8701 op1
= gen_label_rtx ();
8702 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8704 start_cleanup_deferral ();
8706 /* One branch of the cond can be void, if it never returns. For
8707 example A ? throw : E */
8709 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8710 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8712 expand_expr (TREE_OPERAND (exp
, 1),
8713 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8714 end_cleanup_deferral ();
8716 emit_jump_insn (gen_jump (op1
));
8719 start_cleanup_deferral ();
8721 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8722 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8724 expand_expr (TREE_OPERAND (exp
, 2),
8725 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8728 end_cleanup_deferral ();
8739 /* Something needs to be initialized, but we didn't know
8740 where that thing was when building the tree. For example,
8741 it could be the return value of a function, or a parameter
8742 to a function which lays down in the stack, or a temporary
8743 variable which must be passed by reference.
8745 We guarantee that the expression will either be constructed
8746 or copied into our original target. */
8748 tree slot
= TREE_OPERAND (exp
, 0);
8749 tree cleanups
= NULL_TREE
;
8752 if (TREE_CODE (slot
) != VAR_DECL
)
8756 target
= original_target
;
8758 /* Set this here so that if we get a target that refers to a
8759 register variable that's already been used, put_reg_into_stack
8760 knows that it should fix up those uses. */
8761 TREE_USED (slot
) = 1;
8765 if (DECL_RTL_SET_P (slot
))
8767 target
= DECL_RTL (slot
);
8768 /* If we have already expanded the slot, so don't do
8770 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8775 target
= assign_temp (type
, 2, 0, 1);
8776 /* All temp slots at this level must not conflict. */
8777 preserve_temp_slots (target
);
8778 SET_DECL_RTL (slot
, target
);
8779 if (TREE_ADDRESSABLE (slot
))
8780 put_var_into_stack (slot
);
8782 /* Since SLOT is not known to the called function
8783 to belong to its stack frame, we must build an explicit
8784 cleanup. This case occurs when we must build up a reference
8785 to pass the reference as an argument. In this case,
8786 it is very likely that such a reference need not be
8789 if (TREE_OPERAND (exp
, 2) == 0)
8790 TREE_OPERAND (exp
, 2)
8791 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
8792 cleanups
= TREE_OPERAND (exp
, 2);
8797 /* This case does occur, when expanding a parameter which
8798 needs to be constructed on the stack. The target
8799 is the actual stack address that we want to initialize.
8800 The function we call will perform the cleanup in this case. */
8802 /* If we have already assigned it space, use that space,
8803 not target that we were passed in, as our target
8804 parameter is only a hint. */
8805 if (DECL_RTL_SET_P (slot
))
8807 target
= DECL_RTL (slot
);
8808 /* If we have already expanded the slot, so don't do
8810 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8815 SET_DECL_RTL (slot
, target
);
8816 /* If we must have an addressable slot, then make sure that
8817 the RTL that we just stored in slot is OK. */
8818 if (TREE_ADDRESSABLE (slot
))
8819 put_var_into_stack (slot
);
8823 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8824 /* Mark it as expanded. */
8825 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8827 store_expr (exp1
, target
, 0);
8829 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8836 tree lhs
= TREE_OPERAND (exp
, 0);
8837 tree rhs
= TREE_OPERAND (exp
, 1);
8839 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8845 /* If lhs is complex, expand calls in rhs before computing it.
8846 That's so we don't compute a pointer and save it over a
8847 call. If lhs is simple, compute it first so we can give it
8848 as a target if the rhs is just a call. This avoids an
8849 extra temp and copy and that prevents a partial-subsumption
8850 which makes bad code. Actually we could treat
8851 component_ref's of vars like vars. */
8853 tree lhs
= TREE_OPERAND (exp
, 0);
8854 tree rhs
= TREE_OPERAND (exp
, 1);
        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
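        /* E.g. `s.a |= s.b' with two 1-bit fields becomes, roughly,
           `if (s.b) s.a = 1;' -- jump around the store when B is clear --
           and `s.a &= s.b' becomes `if (!s.b) s.a = 0;', avoiding a full
           read-modify-write of the destination field.  */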
8867 && TREE_CODE (lhs
) == COMPONENT_REF
8868 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8869 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8870 && TREE_OPERAND (rhs
, 0) == lhs
8871 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8872 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8873 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8875 rtx label
= gen_label_rtx ();
8877 do_jump (TREE_OPERAND (rhs
, 1),
8878 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8879 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8880 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8881 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8883 : integer_zero_node
)),
8885 do_pending_stack_adjust ();
8890 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8896 if (!TREE_OPERAND (exp
, 0))
8897 expand_null_return ();
8899 expand_return (TREE_OPERAND (exp
, 0));
8902 case PREINCREMENT_EXPR
:
8903 case PREDECREMENT_EXPR
:
8904 return expand_increment (exp
, 0, ignore
);
8906 case POSTINCREMENT_EXPR
:
8907 case POSTDECREMENT_EXPR
:
8908 /* Faster to treat as pre-increment if result is not used. */
8909 return expand_increment (exp
, ! ignore
, ignore
);
8912 /* Are we taking the address of a nested function? */
8913 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8914 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8915 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8916 && ! TREE_STATIC (exp
))
8918 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8919 op0
= force_operand (op0
, target
);
8921 /* If we are taking the address of something erroneous, just
8923 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8925 /* If we are taking the address of a constant and are at the
8926 top level, we have to use output_constant_def since we can't
8927 call force_const_mem at top level. */
8929 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8930 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8932 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8935 /* We make sure to pass const0_rtx down if we came in with
8936 ignore set, to avoid doing the cleanups twice for something. */
8937 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8938 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8939 (modifier
== EXPAND_INITIALIZER
8940 ? modifier
: EXPAND_CONST_ADDRESS
));
8942 /* If we are going to ignore the result, OP0 will have been set
8943 to const0_rtx, so just return it. Don't get confused and
8944 think we are taking the address of the constant. */
8948 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8949 clever and returns a REG when given a MEM. */
8950 op0
= protect_from_queue (op0
, 1);
8952 /* We would like the object in memory. If it is a constant, we can
8953 have it be statically allocated into memory. For a non-constant,
8954 we need to allocate some memory and store the value into it. */
8956 if (CONSTANT_P (op0
))
8957 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8959 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8960 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8961 || GET_CODE (op0
) == PARALLEL
)
8963 /* If the operand is a SAVE_EXPR, we can deal with this by
8964 forcing the SAVE_EXPR into memory. */
8965 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
8967 put_var_into_stack (TREE_OPERAND (exp
, 0));
8968 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
8972 /* If this object is in a register, it can't be BLKmode. */
8973 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8974 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8976 if (GET_CODE (op0
) == PARALLEL
)
8977 /* Handle calls that pass values in multiple
8978 non-contiguous locations. The Irix 6 ABI has examples
8980 emit_group_store (memloc
, op0
,
8981 int_size_in_bytes (inner_type
));
8983 emit_move_insn (memloc
, op0
);
8989 if (GET_CODE (op0
) != MEM
)
8992 mark_temp_addr_taken (op0
);
8993 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8995 op0
= XEXP (op0
, 0);
8996 #ifdef POINTERS_EXTEND_UNSIGNED
8997 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8998 && mode
== ptr_mode
)
8999 op0
= convert_memory_address (ptr_mode
, op0
);
9004 /* If OP0 is not aligned as least as much as the type requires, we
9005 need to make a temporary, copy OP0 to it, and take the address of
9006 the temporary. We want to use the alignment of the type, not of
9007 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9008 the test for BLKmode means that can't happen. The test for
9009 BLKmode is because we never make mis-aligned MEMs with
9012 We don't need to do this at all if the machine doesn't have
9013 strict alignment. */
9014 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
9015 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
9017 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
9019 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9021 = assign_stack_temp_for_type
9022 (TYPE_MODE (inner_type
),
9023 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
9024 : int_size_in_bytes (inner_type
),
9025 1, build_qualified_type (inner_type
,
9026 (TYPE_QUALS (inner_type
)
9027 | TYPE_QUAL_CONST
)));
9029 if (TYPE_ALIGN_OK (inner_type
))
9032 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
9037 op0
= force_operand (XEXP (op0
, 0), target
);
9041 && GET_CODE (op0
) != REG
9042 && modifier
!= EXPAND_CONST_ADDRESS
9043 && modifier
!= EXPAND_INITIALIZER
9044 && modifier
!= EXPAND_SUM
)
9045 op0
= force_reg (Pmode
, op0
);
9047 if (GET_CODE (op0
) == REG
9048 && ! REG_USERVAR_P (op0
))
9049 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
9051 #ifdef POINTERS_EXTEND_UNSIGNED
9052 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
9053 && mode
== ptr_mode
)
9054 op0
= convert_memory_address (ptr_mode
, op0
);
9059 case ENTRY_VALUE_EXPR
:
9062 /* COMPLEX type for Extended Pascal & Fortran */
9065 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9068 /* Get the rtx code of the operands. */
9069 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9070 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
9073 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
9077 /* Move the real (op0) and imaginary (op1) parts to their location. */
9078 emit_move_insn (gen_realpart (mode
, target
), op0
);
9079 emit_move_insn (gen_imagpart (mode
, target
), op1
);
9081 insns
= get_insns ();
9084 /* Complex construction should appear as a single unit. */
9085 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9086 each with a separate pseudo as destination.
9087 It's not correct for flow to treat them as a unit. */
9088 if (GET_CODE (target
) != CONCAT
)
9089 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
9097 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9098 return gen_realpart (mode
, op0
);
9101 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9102 return gen_imagpart (mode
, op0
);
9106 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9110 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9113 target
= gen_reg_rtx (mode
);
9117 /* Store the realpart and the negated imagpart to target. */
9118 emit_move_insn (gen_realpart (partmode
, target
),
9119 gen_realpart (partmode
, op0
));
9121 imag_t
= gen_imagpart (partmode
, target
);
9122 temp
= expand_unop (partmode
,
9123 ! unsignedp
&& flag_trapv
9124 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
9125 ? negv_optab
: neg_optab
,
9126 gen_imagpart (partmode
, op0
), imag_t
, 0);
9128 emit_move_insn (imag_t
, temp
);
9130 insns
= get_insns ();
9133 /* Conjugate should appear as a single unit
9134 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9135 each with a separate pseudo as destination.
9136 It's not correct for flow to treat them as a unit. */
9137 if (GET_CODE (target
) != CONCAT
)
9138 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
9145 case TRY_CATCH_EXPR
:
9147 tree handler
= TREE_OPERAND (exp
, 1);
9149 expand_eh_region_start ();
9151 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9153 expand_eh_region_end_cleanup (handler
);
9158 case TRY_FINALLY_EXPR
:
9160 tree try_block
= TREE_OPERAND (exp
, 0);
9161 tree finally_block
= TREE_OPERAND (exp
, 1);
9163 if (!optimize
|| unsafe_for_reeval (finally_block
) > 1)
          /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
             is not sufficient, so we cannot expand the block twice.
             So we play games with GOTO_SUBROUTINE_EXPR to let us
             expand the thing only once.  */
          /* When not optimizing, we go ahead with this form since
             (1) user breakpoints operate more predictably without
             code duplication, and
             (2) we're not running any of the global optimizers
             that would explode in time/space with the highly
             connected CFG created by the indirect branching.  */
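          /* The generated shape is then, roughly:

                 <try block, with the cleanup registered>
                 goto done;
             finally:
                 <finally block>
                 indirect jump through RETURN_LINK;
             done:

             so the finally code is emitted once and is reached either by
             falling out of the try block or from cleanup paths.  */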
9176 rtx finally_label
= gen_label_rtx ();
9177 rtx done_label
= gen_label_rtx ();
9178 rtx return_link
= gen_reg_rtx (Pmode
);
9179 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
9180 (tree
) finally_label
, (tree
) return_link
);
9181 TREE_SIDE_EFFECTS (cleanup
) = 1;
9183 /* Start a new binding layer that will keep track of all cleanup
9184 actions to be performed. */
9185 expand_start_bindings (2);
9186 target_temp_slot_level
= temp_slot_level
;
9188 expand_decl_cleanup (NULL_TREE
, cleanup
);
9189 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9191 preserve_temp_slots (op0
);
9192 expand_end_bindings (NULL_TREE
, 0, 0);
9193 emit_jump (done_label
);
9194 emit_label (finally_label
);
9195 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
9196 emit_indirect_jump (return_link
);
9197 emit_label (done_label
);
9201 expand_start_bindings (2);
9202 target_temp_slot_level
= temp_slot_level
;
9204 expand_decl_cleanup (NULL_TREE
, finally_block
);
9205 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9207 preserve_temp_slots (op0
);
9208 expand_end_bindings (NULL_TREE
, 0, 0);
9214 case GOTO_SUBROUTINE_EXPR
:
9216 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9217 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9218 rtx return_address
= gen_label_rtx ();
9219 emit_move_insn (return_link
,
9220 gen_rtx_LABEL_REF (Pmode
, return_address
));
9222 emit_label (return_address
);
9227 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9230 return get_exception_pointer (cfun
);
9233 /* Function descriptors are not valid except for as
9234 initialization constants, and should not be expanded. */
9238 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
9241 /* Here to do an ordinary binary operator, generating an instruction
9242 from the optab already placed in `this_optab'. */
9244 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
9246 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
9247 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9249 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9250 unsignedp
, OPTAB_LIB_WIDEN
);
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */
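/* Schematically, the offset recognized here has the shape
   (-(ADDR of EXP)) & (ALIGN - 1), with ALIGN a power of 2 larger than
   BIGGEST_ALIGNMENT; adding such an offset to the address rounds it up to
   the next ALIGN boundary, so the result is known to be over-aligned.
   (Informal restatement of the checks below.)  */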
static int
is_aligning_offset (offset, exp)
     tree offset;
     tree exp;
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR
         || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
          && (TREE_OPERAND (offset, 0) == exp
              || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
                  && (TREE_TYPE (TREE_OPERAND (offset, 0))
                      == TREE_TYPE (exp)))));
}
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
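/* For instance, for an argument tree representing "abc" + 1 (a PLUS_EXPR of
   an ADDR_EXPR of the STRING_CST and the constant 1), this returns the
   STRING_CST for "abc" and sets *PTR_OFFSET to 1, so callers such as the
   string builtin expanders can fold the access.  */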
tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg1);
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg0);
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
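/* E.g. `i++' used for its value expands to a copy of the old I followed by
   a queued add, while `++i' (or `i++' whose value is ignored) can often be
   emitted as a single add on I itself; the POST and IGNORE arguments below
   select between those shapes.  */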
static rtx
expand_increment (exp, post, ignore)
     tree exp;
     int post, ignore;
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);
9386 /* Compute the operands as RTX.
9387 Note whether OP0 is the actual lvalue or a copy of it:
9388 I believe it is a copy iff it is a register or subreg
9389 and insns were generated in computing it. */
9391 temp
= get_last_insn ();
9392 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9394 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9395 in place but instead must do sign- or zero-extension during assignment,
9396 so we copy it into a new register and let the code below use it as
9399 Note that we can safely modify this SUBREG since it is know not to be
9400 shared (it was made by the expand_expr call above). */
9402 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9405 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9409 else if (GET_CODE (op0
) == SUBREG
9410 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9412 /* We cannot increment this SUBREG in place. If we are
9413 post-incrementing, get a copy of the old value. Otherwise,
9414 just mark that we cannot increment in place. */
9416 op0
= copy_to_reg (op0
);
9421 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9422 && temp
!= get_last_insn ());
9423 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9425 /* Decide whether incrementing or decrementing. */
9426 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9427 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9428 this_optab
= sub_optab
;
9430 /* Convert decrement by a constant into a negative increment. */
9431 if (this_optab
== sub_optab
9432 && GET_CODE (op1
) == CONST_INT
)
9434 op1
= GEN_INT (-INTVAL (op1
));
9435 this_optab
= add_optab
;
9438 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9439 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9441 /* For a preincrement, see if we can do this with a single instruction. */
9444 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9445 if (icode
!= (int) CODE_FOR_nothing
9446 /* Make sure that OP0 is valid for operands 0 and 1
9447 of the insn we want to queue. */
9448 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9449 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9450 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9454 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9455 then we cannot just increment OP0. We must therefore contrive to
9456 increment the original value. Then, for postincrement, we can return
9457 OP0 since it is a copy of the old value. For preincrement, expand here
9458 unless we can do it with a single insn.
9460 Likewise if storing directly into OP0 would clobber high bits
9461 we need to preserve (bad_subreg). */
9462 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9464 /* This is the easiest way to increment the value wherever it is.
9465 Problems with multiple evaluation of INCREMENTED are prevented
9466 because either (1) it is a component_ref or preincrement,
9467 in which case it was stabilized above, or (2) it is an array_ref
9468 with constant index in an array in a register, which is
9469 safe to reevaluate. */
9470 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9471 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9472 ? MINUS_EXPR
: PLUS_EXPR
),
9475 TREE_OPERAND (exp
, 1));
9477 while (TREE_CODE (incremented
) == NOP_EXPR
9478 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9480 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9481 incremented
= TREE_OPERAND (incremented
, 0);
9484 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9485 return post
? op0
: temp
;
9490 /* We have a true reference to the value in OP0.
9491 If there is an insn to add or subtract in this mode, queue it.
9492 Queueing the increment insn avoids the register shuffling
9493 that often results if we must increment now and first save
9494 the old value for subsequent use. */
9496 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9497 op0
= stabilize (op0
);
9500 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9501 if (icode
!= (int) CODE_FOR_nothing
9502 /* Make sure that OP0 is valid for operands 0 and 1
9503 of the insn we want to queue. */
9504 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9505 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9507 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9508 op1
= force_reg (mode
, op1
);
9510 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9512 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9514 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9515 ? force_reg (Pmode
, XEXP (op0
, 0))
9516 : copy_to_reg (XEXP (op0
, 0)));
9519 op0
= replace_equiv_address (op0
, addr
);
9520 temp
= force_reg (GET_MODE (op0
), op0
);
9521 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9522 op1
= force_reg (mode
, op1
);
9524 /* The increment queue is LIFO, thus we have to `queue'
9525 the instructions in reverse order. */
9526 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9527 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9532 /* Preincrement, or we can't increment with one simple insn. */
9534 /* Save a copy of the value before inc or dec, to return it later. */
9535 temp
= value
= copy_to_reg (op0
);
9537 /* Arrange to return the incremented value. */
9538 /* Copy the rtx because expand_binop will protect from the queue,
9539 and the results of that would be invalid for us to return
9540 if our caller does emit_queue before using our result. */
9541 temp
= copy_rtx (value
= op0
);
9543 /* Increment however we can. */
9544 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9545 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9547 /* Make sure the value is stored into OP0. */
9549 emit_move_insn (op0
, op1
);
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
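/* For example, for `if (a && b)' do_jump is called with the
   TRUTH_ANDIF_EXPR and emits a jump to the false label as soon as A is
   known to be zero, evaluating B only on the path where A was nonzero; no
   0/1 value is ever materialized for the condition.  */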
void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9667 /* This is not true with #pragma weak */
9669 /* The address of something can never be zero. */
9671 emit_jump (if_true_label
);
9676 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9677 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9678 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
9679 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
9682 /* If we are narrowing the operand, we have to do the compare in the
9684 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9685 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9687 case NON_LVALUE_EXPR
:
9688 case REFERENCE_EXPR
:
9693 /* These cannot change zero->nonzero or vice versa. */
9694 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9697 case WITH_RECORD_EXPR
:
9698 /* Put the object on the placeholder list, recurse through our first
9699 operand, and pop the list. */
9700 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9702 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9703 placeholder_list
= TREE_CHAIN (placeholder_list
);
9707 /* This is never less insns than evaluating the PLUS_EXPR followed by
9708 a test and can be longer if the test is eliminated. */
9710 /* Reduce to minus. */
9711 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9712 TREE_OPERAND (exp
, 0),
9713 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9714 TREE_OPERAND (exp
, 1))));
9715 /* Process as MINUS. */
9719 /* Nonzero iff operands of minus differ. */
9720 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9721 TREE_OPERAND (exp
, 0),
9722 TREE_OPERAND (exp
, 1)),
9723 NE
, NE
, if_false_label
, if_true_label
);
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
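      /* E.g. `if (x & 0x80)' with int X can be tested as a QImode
         comparison: the constant fits in 8 bits, so mode_for_size picks
         QImode and only the low byte is examined, which matters on targets
         where narrow compares are cheaper.  (Sketch; the exact mode comes
         from the checks below.)  */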
9734 if (! SLOW_BYTE_ACCESS
9735 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9736 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9737 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9738 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9739 && (type
= (*lang_hooks
.types
.type_for_mode
) (mode
, 1)) != 0
9740 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9741 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9742 != CODE_FOR_nothing
))
9744 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9749 case TRUTH_NOT_EXPR
:
9750 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9753 case TRUTH_ANDIF_EXPR
:
9754 if (if_false_label
== 0)
9755 if_false_label
= drop_through_label
= gen_label_rtx ();
9756 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9757 start_cleanup_deferral ();
9758 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9759 end_cleanup_deferral ();
9762 case TRUTH_ORIF_EXPR
:
9763 if (if_true_label
== 0)
9764 if_true_label
= drop_through_label
= gen_label_rtx ();
9765 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9766 start_cleanup_deferral ();
9767 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9768 end_cleanup_deferral ();
9773 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9774 preserve_temp_slots (NULL_RTX
);
9778 do_pending_stack_adjust ();
9779 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9785 case ARRAY_RANGE_REF
:
9787 HOST_WIDE_INT bitsize
, bitpos
;
9789 enum machine_mode mode
;
9794 /* Get description of this reference. We don't actually care
9795 about the underlying object here. */
9796 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9797 &unsignedp
, &volatilep
);
9799 type
= (*lang_hooks
.types
.type_for_size
) (bitsize
, unsignedp
);
9800 if (! SLOW_BYTE_ACCESS
9801 && type
!= 0 && bitsize
>= 0
9802 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9803 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9804 != CODE_FOR_nothing
))
9806 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9813 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9814 if (integer_onep (TREE_OPERAND (exp
, 1))
9815 && integer_zerop (TREE_OPERAND (exp
, 2)))
9816 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9818 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9819 && integer_onep (TREE_OPERAND (exp
, 2)))
9820 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9824 rtx label1
= gen_label_rtx ();
9825 drop_through_label
= gen_label_rtx ();
9827 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9829 start_cleanup_deferral ();
9830 /* Now the THEN-expression. */
9831 do_jump (TREE_OPERAND (exp
, 1),
9832 if_false_label
? if_false_label
: drop_through_label
,
9833 if_true_label
? if_true_label
: drop_through_label
);
9834 /* In case the do_jump just above never jumps. */
9835 do_pending_stack_adjust ();
9836 emit_label (label1
);
9838 /* Now the ELSE-expression. */
9839 do_jump (TREE_OPERAND (exp
, 2),
9840 if_false_label
? if_false_label
: drop_through_label
,
9841 if_true_label
? if_true_label
: drop_through_label
);
9842 end_cleanup_deferral ();
9848 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9850 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9851 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9853 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9854 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9857 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9858 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9859 fold (build1 (REALPART_EXPR
,
9860 TREE_TYPE (inner_type
),
9862 fold (build1 (REALPART_EXPR
,
9863 TREE_TYPE (inner_type
),
9865 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9866 fold (build1 (IMAGPART_EXPR
,
9867 TREE_TYPE (inner_type
),
9869 fold (build1 (IMAGPART_EXPR
,
9870 TREE_TYPE (inner_type
),
9872 if_false_label
, if_true_label
);
9875 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9876 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9878 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9879 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9880 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9882 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9888 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9890 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9891 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9893 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9894 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9897 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9898 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9899 fold (build1 (REALPART_EXPR
,
9900 TREE_TYPE (inner_type
),
9902 fold (build1 (REALPART_EXPR
,
9903 TREE_TYPE (inner_type
),
9905 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9906 fold (build1 (IMAGPART_EXPR
,
9907 TREE_TYPE (inner_type
),
9909 fold (build1 (IMAGPART_EXPR
,
9910 TREE_TYPE (inner_type
),
9912 if_false_label
, if_true_label
);
9915 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9916 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9918 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9919 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9920 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9922 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	enum rtx_code cmp, rcmp;
	int do_rev;

	if (code == UNORDERED_EXPR)
	  cmp = UNORDERED, rcmp = ORDERED;
	else
	  cmp = ORDERED, rcmp = UNORDERED;
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

	do_rev = 0;
	if (! can_compare_p (cmp, mode, ccp_jump)
	    && (can_compare_p (rcmp, mode, ccp_jump)
		/* If the target doesn't provide either UNORDERED or ORDERED
		   comparisons, canonicalize on UNORDERED for the library.  */
		|| rcmp == UNORDERED))
	  do_rev = 1;

	if (! do_rev)
	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;
      {
	enum rtx_code rcode1;
	enum tree_code tcode2;

	case UNLT_EXPR:
	  rcode1 = UNLT;
	  tcode2 = LT_EXPR;
	  goto unordered_bcc;
	case UNLE_EXPR:
	  rcode1 = UNLE;
	  tcode2 = LE_EXPR;
	  goto unordered_bcc;
	case UNGT_EXPR:
	  rcode1 = UNGT;
	  tcode2 = GT_EXPR;
	  goto unordered_bcc;
	case UNGE_EXPR:
	  rcode1 = UNGE;
	  tcode2 = GE_EXPR;
	  goto unordered_bcc;

	unordered_bcc:
	  mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  if (can_compare_p (rcode1, mode, ccp_jump))
	    do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
				 if_true_label);
	  else
	    {
	      tree op0 = save_expr (TREE_OPERAND (exp, 0));
	      tree op1 = save_expr (TREE_OPERAND (exp, 1));
	      tree cmp0, cmp1;

	      /* If the target doesn't support combined unordered
		 compares, decompose into UNORDERED + comparison.  */
	      cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
	      cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
	      exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	      do_jump (exp, if_false_label, if_true_label);
	    }
      }
      break;
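      /* Illustrative note (not compiler code): when the target lacks a
	 combined unordered comparison such as UNLT, the test is decomposed
	 as if the source had been written with the C99 macro

	     isunordered (a, b) || a < b

	 which is exactly the TRUTH_ORIF_EXPR built above.  */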
    case CALL_EXPR:
      /* __builtin_expect (<test>, 0) and
	 __builtin_expect (<test>, 1)

	 We need to do this here, so that <test> is not converted to a SCC
	 operation on machines that use condition code registers and COMPARE
	 like the PowerPC, and then the jump is done based on whether the SCC
	 operation produced a 1 or 0.  */

      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  tree arglist = TREE_OPERAND (exp, 1);

	  if (TREE_CODE (fndecl) == FUNCTION_DECL
	      && DECL_BUILT_IN (fndecl)
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && arglist != NULL_TREE
	      && TREE_CHAIN (arglist) != NULL_TREE)
	    {
	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						    if_true_label);

	      if (seq != NULL_RTX)
		{
		  emit_insn (seq);
		  return;
		}
	    }
	}
      /* fall through and generate the normal code.  */
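      /* Usage sketch (illustrative only): a test such as

	     if (__builtin_expect (ptr == 0, 0))
	       handle_rare_error ();

	 reaches this point as a CALL_EXPR, and expand_builtin_expect_jump
	 emits the conditional jump directly, keeping the hint instead of
	 first materializing a 0/1 store-flag value on cc0/COMPARE targets
	 such as the PowerPC.  */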
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
	  || GET_CODE (temp) == LABEL_REF)
	{
	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				 GET_MODE (temp), NULL_RTX,
				 if_false_label, if_true_label);
      else
	abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
			      if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
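/* Illustrative sketch only (hypothetical helper, never compiled): the loop
   above is the classic multiword comparison -- examine the words from most
   significant to least significant and decide as soon as one differs.  For
   an unsigned two-word value split into hi/lo parts:  */
#if 0
static int
greater_2words (unsigned long hi0, unsigned long lo0,
		unsigned long hi1, unsigned long lo1)
{
  if (hi0 > hi1)		/* the high-order word decides ...	*/
    return 1;
  if (hi0 != hi1)		/* ... unless the words are equal,	*/
    return 0;
  return lo0 > lo1;		/* then the low-order word decides.	*/
}
#endif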
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
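/* Illustrative sketch only (hypothetical helper, never compiled): the fast
   path above tests a wide value for zero by IOR-ing all of its words into
   one register and doing a single comparison:  */
#if 0
static int
is_zero_nwords (const unsigned long *words, int nwords)
{
  unsigned long part = words[0];
  int i;

  for (i = 1; i < nwords; i++)
    part |= words[i];		/* one IOR per additional word ...	*/
  return part == 0;		/* ... and a single compare with zero.	*/
}
#endif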
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));

      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
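/* Illustrative note on the disabled block above (hypothetical helper, never
   compiled): equality does not depend on signedness, so a signed EQ/NE can
   be issued as an unsigned comparison once a constant operand is reduced
   modulo the operand mode.  Assuming a 16-bit short:  */
#if 0
static int
eq_as_unsigned (short x)
{
  /* (x == -1) is the same test as ((unsigned short) x == 0xffff),
     i.e. -1 masked with GET_MODE_MASK (HImode).  */
  return (unsigned short) x == 0xffffu;
}
#endif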
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));

      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  tree type;
  rtx op0, op1;
  int unsignedp;
  enum machine_mode mode;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE
				  (TREE_TYPE (TREE_OPERAND (exp, 1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
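/* Illustrative sketch only (hypothetical helpers, never compiled): the
   single-bit special case in do_store_flag corresponds to the source-level
   rewrite below -- shift the tested bit down to bit 0, mask with 1, and
   XOR with 1 when the condition was EQ:  */
#if 0
static int
bit_test_ne (unsigned int x)
{
  /* ((x & 0x10) != 0)  becomes  ((x >> 4) & 1)  */
  return (x >> 4) & 1;
}

static int
bit_test_eq (unsigned int x)
{
  /* ((x & 0x10) == 0)  becomes  (((x >> 4) & 1) ^ 1)  */
  return ((x >> 4) & 1) ^ 1;
}
#endif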
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
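/* Illustrative sketch only (hypothetical helper, never compiled): because
   INDEX already has the table's lower bound subtracted, the single GTU
   comparison above checks both ends of the range at once -- an original
   value below the minimum wraps around to a large unsigned number:  */
#if 0
static int
in_table_range (long val, long low, unsigned long range)
{
  /* Equivalent to: val >= low && val <= low + (long) range,
     assuming the subtraction does not overflow.  */
  return (unsigned long) (val - low) <= range;
}
#endif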
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
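/* Illustrative sketch only (hypothetical helper, never compiled): "emulate
   with narrower modes" above means that, say, a V2DI move can be carried
   out as two DImode moves whenever a DImode mov pattern exists:  */
#if 0
static void
move_v2di_as_two_di (long long dest[2], const long long src[2])
{
  dest[0] = src[0];		/* first DImode half	*/
  dest[1] = src[1];		/* second DImode half	*/
}
#endif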
#include "gt-expr.h"