1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
31 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
43 #include "typeclass.h"
46 #include "langhooks.h"
50 /* Decide whether a function's arguments should be processed
51 from first to last or from last to first.
53 They should if the stack and args grow in opposite directions, but
54 only if we have push insns. */
58 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59 #define PUSH_ARGS_REVERSED /* If it's last to first. */
64 #ifndef STACK_PUSH_CODE
65 #ifdef STACK_GROWS_DOWNWARD
66 #define STACK_PUSH_CODE PRE_DEC
68 #define STACK_PUSH_CODE PRE_INC
72 /* Assume that case vectors are not pc-relative. */
73 #ifndef CASE_VECTOR_PC_RELATIVE
74 #define CASE_VECTOR_PC_RELATIVE 0
77 /* If this is nonzero, we do not bother generating VOLATILE
78 around volatile memory references, and we are willing to
79 output indirect addresses. If cse is to follow, we reject
80 indirect addresses so a useful potential cse is generated;
81 if it is used only once, instruction combination will produce
82 the same indirect address eventually. */
85 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
86 static tree placeholder_list
= 0;
88 /* This structure is used by move_by_pieces to describe the move to
99 int explicit_inc_from
;
100 unsigned HOST_WIDE_INT len
;
101 HOST_WIDE_INT offset
;
105 /* This structure is used by store_by_pieces to describe the clear to
108 struct store_by_pieces
114 unsigned HOST_WIDE_INT len
;
115 HOST_WIDE_INT offset
;
116 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
121 extern struct obstack permanent_obstack
;
123 static rtx enqueue_insn
PARAMS ((rtx
, rtx
));
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
125 PARAMS ((unsigned HOST_WIDE_INT
,
127 static void move_by_pieces_1
PARAMS ((rtx (*) (rtx
, ...), enum machine_mode
,
128 struct move_by_pieces
*));
129 static rtx clear_by_pieces_1
PARAMS ((PTR
, HOST_WIDE_INT
,
131 static void clear_by_pieces
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
133 static void store_by_pieces_1
PARAMS ((struct store_by_pieces
*,
135 static void store_by_pieces_2
PARAMS ((rtx (*) (rtx
, ...),
137 struct store_by_pieces
*));
138 static rtx compress_float_constant
PARAMS ((rtx
, rtx
));
139 static rtx get_subtarget
PARAMS ((rtx
));
140 static int is_zeros_p
PARAMS ((tree
));
141 static int mostly_zeros_p
PARAMS ((tree
));
142 static void store_constructor_field
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
143 HOST_WIDE_INT
, enum machine_mode
,
144 tree
, tree
, int, int));
145 static void store_constructor
PARAMS ((tree
, rtx
, int, HOST_WIDE_INT
));
146 static rtx store_field
PARAMS ((rtx
, HOST_WIDE_INT
,
147 HOST_WIDE_INT
, enum machine_mode
,
148 tree
, enum machine_mode
, int, tree
,
150 static rtx var_rtx
PARAMS ((tree
));
151 static HOST_WIDE_INT highest_pow2_factor
PARAMS ((tree
));
152 static HOST_WIDE_INT highest_pow2_factor_for_type
PARAMS ((tree
, tree
));
153 static int is_aligning_offset
PARAMS ((tree
, tree
));
154 static rtx expand_increment
PARAMS ((tree
, int, int));
155 static void do_jump_by_parts_greater
PARAMS ((tree
, int, rtx
, rtx
));
156 static void do_jump_by_parts_equality
PARAMS ((tree
, rtx
, rtx
));
157 static void do_compare_and_jump
PARAMS ((tree
, enum rtx_code
, enum rtx_code
,
159 static rtx do_store_flag
PARAMS ((tree
, rtx
, enum machine_mode
, int));
161 static void emit_single_push_insn
PARAMS ((enum machine_mode
, rtx
, tree
));
163 static void do_tablejump
PARAMS ((rtx
, enum machine_mode
, rtx
, rtx
, rtx
));
165 /* Record for each mode whether we can move a register directly to or
166 from an object of that mode in memory. If we can't, we won't try
167 to use that mode directly when accessing a field of that mode. */
169 static char direct_load
[NUM_MACHINE_MODES
];
170 static char direct_store
[NUM_MACHINE_MODES
];
172 /* Record for each mode whether we can float-extend from memory. */
174 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
176 /* If a memory-to-memory move would take MOVE_RATIO or more simple
177 move-instruction sequences, we will do a movstr or libcall instead. */
180 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
183 /* If we are optimizing for space (-Os), cut down the default move ratio. */
184 #define MOVE_RATIO (optimize_size ? 3 : 15)
188 /* This macro is used to determine whether move_by_pieces should be called
189 to perform a structure copy. */
190 #ifndef MOVE_BY_PIECES_P
191 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
195 /* This array records the insn_code of insns to perform block moves. */
196 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
198 /* This array records the insn_code of insns to perform block clears. */
199 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
201 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
203 #ifndef SLOW_UNALIGNED_ACCESS
204 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
207 /* This is run once per compilation to set up which modes can be used
208 directly in memory and to initialize the block move optab. */
214 enum machine_mode mode
;
218 /* Try indexing by frame ptr and try by stack ptr.
219 It is known that on the Convex the stack ptr isn't a valid index.
220 With luck, one or the other is valid on any machine. */
221 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
222 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
224 insn
= rtx_alloc (INSN
);
225 pat
= gen_rtx_SET (0, NULL_RTX
, NULL_RTX
);
226 PATTERN (insn
) = pat
;
228 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
229 mode
= (enum machine_mode
) ((int) mode
+ 1))
234 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
235 PUT_MODE (mem
, mode
);
236 PUT_MODE (mem1
, mode
);
238 /* See if there is some register that can be used in this mode and
239 directly loaded or stored from memory. */
241 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
242 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
243 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
246 if (! HARD_REGNO_MODE_OK (regno
, mode
))
249 reg
= gen_rtx_REG (mode
, regno
);
252 SET_DEST (pat
) = reg
;
253 if (recog (pat
, insn
, &num_clobbers
) >= 0)
254 direct_load
[(int) mode
] = 1;
256 SET_SRC (pat
) = mem1
;
257 SET_DEST (pat
) = reg
;
258 if (recog (pat
, insn
, &num_clobbers
) >= 0)
259 direct_load
[(int) mode
] = 1;
262 SET_DEST (pat
) = mem
;
263 if (recog (pat
, insn
, &num_clobbers
) >= 0)
264 direct_store
[(int) mode
] = 1;
267 SET_DEST (pat
) = mem1
;
268 if (recog (pat
, insn
, &num_clobbers
) >= 0)
269 direct_store
[(int) mode
] = 1;
273 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
275 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
276 mode
= GET_MODE_WIDER_MODE (mode
))
278 enum machine_mode srcmode
;
279 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
280 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
284 ic
= can_extend_p (mode
, srcmode
, 0);
285 if (ic
== CODE_FOR_nothing
)
288 PUT_MODE (mem
, srcmode
);
290 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
291 float_extend_from_mem
[mode
][srcmode
] = true;
296 /* This is run at the start of compiling a function. */
301 cfun
->expr
= (struct expr_status
*) xmalloc (sizeof (struct expr_status
));
304 pending_stack_adjust
= 0;
305 stack_pointer_delta
= 0;
306 inhibit_defer_pop
= 0;
308 apply_args_value
= 0;
314 struct expr_status
*p
;
319 ggc_mark_rtx (p
->x_saveregs_value
);
320 ggc_mark_rtx (p
->x_apply_args_value
);
321 ggc_mark_rtx (p
->x_forced_labels
);
332 /* Small sanity check that the queue is empty at the end of a function. */
335 finish_expr_for_function ()
341 /* Manage the queue of increment instructions to be output
342 for POSTINCREMENT_EXPR expressions, etc. */
344 /* Queue up to increment (or change) VAR later. BODY says how:
345 BODY should be the same thing you would pass to emit_insn
346 to increment right away. It will go to emit_insn later on.
348 The value is a QUEUED expression to be used in place of VAR
349 where you want to guarantee the pre-incrementation value of VAR. */
352 enqueue_insn (var
, body
)
355 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
356 body
, pending_chain
);
357 return pending_chain
;
360 /* Use protect_from_queue to convert a QUEUED expression
361 into something that you can put immediately into an instruction.
362 If the queued incrementation has not happened yet,
363 protect_from_queue returns the variable itself.
364 If the incrementation has happened, protect_from_queue returns a temp
365 that contains a copy of the old value of the variable.
367 Any time an rtx which might possibly be a QUEUED is to be put
368 into an instruction, it must be passed through protect_from_queue first.
369 QUEUED expressions are not meaningful in instructions.
371 Do not pass a value through protect_from_queue and then hold
372 on to it for a while before putting it in an instruction!
373 If the queue is flushed in between, incorrect code will result. */
376 protect_from_queue (x
, modify
)
380 RTX_CODE code
= GET_CODE (x
);
382 #if 0 /* A QUEUED can hang around after the queue is forced out. */
383 /* Shortcut for most common case. */
384 if (pending_chain
== 0)
390 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
391 use of autoincrement. Make a copy of the contents of the memory
392 location rather than a copy of the address, but not if the value is
393 of mode BLKmode. Don't modify X in place since it might be
395 if (code
== MEM
&& GET_MODE (x
) != BLKmode
396 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
399 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
403 rtx temp
= gen_reg_rtx (GET_MODE (x
));
405 emit_insn_before (gen_move_insn (temp
, new),
410 /* Copy the address into a pseudo, so that the returned value
411 remains correct across calls to emit_queue. */
412 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
415 /* Otherwise, recursively protect the subexpressions of all
416 the kinds of rtx's that can contain a QUEUED. */
419 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
420 if (tem
!= XEXP (x
, 0))
426 else if (code
== PLUS
|| code
== MULT
)
428 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
429 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
430 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
439 /* If the increment has not happened, use the variable itself. Copy it
440 into a new pseudo so that the value remains correct across calls to
442 if (QUEUED_INSN (x
) == 0)
443 return copy_to_reg (QUEUED_VAR (x
));
444 /* If the increment has happened and a pre-increment copy exists,
446 if (QUEUED_COPY (x
) != 0)
447 return QUEUED_COPY (x
);
448 /* The increment has happened but we haven't set up a pre-increment copy.
449 Set one up now, and use it. */
450 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
451 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
453 return QUEUED_COPY (x
);
456 /* Return nonzero if X contains a QUEUED expression:
457 if it contains anything that will be altered by a queued increment.
458 We handle only combinations of MEM, PLUS, MINUS and MULT operators
459 since memory addresses generally contain only those. */
465 enum rtx_code code
= GET_CODE (x
);
471 return queued_subexp_p (XEXP (x
, 0));
475 return (queued_subexp_p (XEXP (x
, 0))
476 || queued_subexp_p (XEXP (x
, 1)));
482 /* Perform all the pending incrementations. */
488 while ((p
= pending_chain
))
490 rtx body
= QUEUED_BODY (p
);
492 if (GET_CODE (body
) == SEQUENCE
)
494 QUEUED_INSN (p
) = XVECEXP (QUEUED_BODY (p
), 0, 0);
495 emit_insn (QUEUED_BODY (p
));
498 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
499 pending_chain
= QUEUED_NEXT (p
);
503 /* Copy data from FROM to TO, where the machine modes are not the same.
504 Both modes may be integer, or both may be floating.
505 UNSIGNEDP should be nonzero if FROM is an unsigned type.
506 This causes zero-extension instead of sign-extension. */
509 convert_move (to
, from
, unsignedp
)
513 enum machine_mode to_mode
= GET_MODE (to
);
514 enum machine_mode from_mode
= GET_MODE (from
);
515 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
516 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
520 /* rtx code for making an equivalent value. */
521 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
523 to
= protect_from_queue (to
, 1);
524 from
= protect_from_queue (from
, 0);
526 if (to_real
!= from_real
)
529 /* If FROM is a SUBREG that indicates that we have already done at least
530 the required extension, strip it. We don't handle such SUBREGs as
533 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
534 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
535 >= GET_MODE_SIZE (to_mode
))
536 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
537 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
539 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
542 if (to_mode
== from_mode
543 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
545 emit_move_insn (to
, from
);
549 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
551 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
554 if (VECTOR_MODE_P (to_mode
))
555 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
557 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
559 emit_move_insn (to
, from
);
563 if (to_real
!= from_real
)
570 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
572 /* Try converting directly if the insn is supported. */
573 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
576 emit_unop_insn (code
, to
, from
, UNKNOWN
);
581 #ifdef HAVE_trunchfqf2
582 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
584 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
588 #ifdef HAVE_trunctqfqf2
589 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
591 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
595 #ifdef HAVE_truncsfqf2
596 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
598 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
602 #ifdef HAVE_truncdfqf2
603 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
605 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
609 #ifdef HAVE_truncxfqf2
610 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
612 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
616 #ifdef HAVE_trunctfqf2
617 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
619 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
624 #ifdef HAVE_trunctqfhf2
625 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
627 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
631 #ifdef HAVE_truncsfhf2
632 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
634 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
638 #ifdef HAVE_truncdfhf2
639 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
641 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
645 #ifdef HAVE_truncxfhf2
646 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
648 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
652 #ifdef HAVE_trunctfhf2
653 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
655 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
660 #ifdef HAVE_truncsftqf2
661 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
663 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
667 #ifdef HAVE_truncdftqf2
668 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
670 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
674 #ifdef HAVE_truncxftqf2
675 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
677 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
681 #ifdef HAVE_trunctftqf2
682 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
684 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
689 #ifdef HAVE_truncdfsf2
690 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
692 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
696 #ifdef HAVE_truncxfsf2
697 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
699 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
703 #ifdef HAVE_trunctfsf2
704 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
706 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
710 #ifdef HAVE_truncxfdf2
711 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
713 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
717 #ifdef HAVE_trunctfdf2
718 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
720 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
732 libcall
= extendsfdf2_libfunc
;
736 libcall
= extendsfxf2_libfunc
;
740 libcall
= extendsftf2_libfunc
;
752 libcall
= truncdfsf2_libfunc
;
756 libcall
= extenddfxf2_libfunc
;
760 libcall
= extenddftf2_libfunc
;
772 libcall
= truncxfsf2_libfunc
;
776 libcall
= truncxfdf2_libfunc
;
788 libcall
= trunctfsf2_libfunc
;
792 libcall
= trunctfdf2_libfunc
;
804 if (libcall
== (rtx
) 0)
805 /* This conversion is not implemented yet. */
809 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
811 insns
= get_insns ();
813 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
818 /* Now both modes are integers. */
820 /* Handle expanding beyond a word. */
821 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
822 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
829 enum machine_mode lowpart_mode
;
830 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
832 /* Try converting directly if the insn is supported. */
833 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
836 /* If FROM is a SUBREG, put it into a register. Do this
837 so that we always generate the same set of insns for
838 better cse'ing; if an intermediate assignment occurred,
839 we won't be doing the operation directly on the SUBREG. */
840 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
841 from
= force_reg (from_mode
, from
);
842 emit_unop_insn (code
, to
, from
, equiv_code
);
845 /* Next, try converting via full word. */
846 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
847 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
848 != CODE_FOR_nothing
))
850 if (GET_CODE (to
) == REG
)
851 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
852 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
853 emit_unop_insn (code
, to
,
854 gen_lowpart (word_mode
, to
), equiv_code
);
858 /* No special multiword conversion insn; do it by hand. */
861 /* Since we will turn this into a no conflict block, we must ensure
862 that the source does not overlap the target. */
864 if (reg_overlap_mentioned_p (to
, from
))
865 from
= force_reg (from_mode
, from
);
867 /* Get a copy of FROM widened to a word, if necessary. */
868 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
869 lowpart_mode
= word_mode
;
871 lowpart_mode
= from_mode
;
873 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
875 lowpart
= gen_lowpart (lowpart_mode
, to
);
876 emit_move_insn (lowpart
, lowfrom
);
878 /* Compute the value to put in each remaining word. */
880 fill_value
= const0_rtx
;
885 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
886 && STORE_FLAG_VALUE
== -1)
888 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
890 fill_value
= gen_reg_rtx (word_mode
);
891 emit_insn (gen_slt (fill_value
));
897 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
898 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
900 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
904 /* Fill the remaining words. */
905 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
907 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
908 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
913 if (fill_value
!= subword
)
914 emit_move_insn (subword
, fill_value
);
917 insns
= get_insns ();
920 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
921 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
925 /* Truncating multi-word to a word or less. */
926 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
927 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
929 if (!((GET_CODE (from
) == MEM
930 && ! MEM_VOLATILE_P (from
)
931 && direct_load
[(int) to_mode
]
932 && ! mode_dependent_address_p (XEXP (from
, 0)))
933 || GET_CODE (from
) == REG
934 || GET_CODE (from
) == SUBREG
))
935 from
= force_reg (from_mode
, from
);
936 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
940 /* Handle pointer conversion. */ /* SPEE 900220. */
941 if (to_mode
== PQImode
)
943 if (from_mode
!= QImode
)
944 from
= convert_to_mode (QImode
, from
, unsignedp
);
946 #ifdef HAVE_truncqipqi2
947 if (HAVE_truncqipqi2
)
949 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
952 #endif /* HAVE_truncqipqi2 */
956 if (from_mode
== PQImode
)
958 if (to_mode
!= QImode
)
960 from
= convert_to_mode (QImode
, from
, unsignedp
);
965 #ifdef HAVE_extendpqiqi2
966 if (HAVE_extendpqiqi2
)
968 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
971 #endif /* HAVE_extendpqiqi2 */
976 if (to_mode
== PSImode
)
978 if (from_mode
!= SImode
)
979 from
= convert_to_mode (SImode
, from
, unsignedp
);
981 #ifdef HAVE_truncsipsi2
982 if (HAVE_truncsipsi2
)
984 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
987 #endif /* HAVE_truncsipsi2 */
991 if (from_mode
== PSImode
)
993 if (to_mode
!= SImode
)
995 from
= convert_to_mode (SImode
, from
, unsignedp
);
1000 #ifdef HAVE_extendpsisi2
1001 if (! unsignedp
&& HAVE_extendpsisi2
)
1003 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
1006 #endif /* HAVE_extendpsisi2 */
1007 #ifdef HAVE_zero_extendpsisi2
1008 if (unsignedp
&& HAVE_zero_extendpsisi2
)
1010 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
1013 #endif /* HAVE_zero_extendpsisi2 */
1018 if (to_mode
== PDImode
)
1020 if (from_mode
!= DImode
)
1021 from
= convert_to_mode (DImode
, from
, unsignedp
);
1023 #ifdef HAVE_truncdipdi2
1024 if (HAVE_truncdipdi2
)
1026 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1029 #endif /* HAVE_truncdipdi2 */
1033 if (from_mode
== PDImode
)
1035 if (to_mode
!= DImode
)
1037 from
= convert_to_mode (DImode
, from
, unsignedp
);
1042 #ifdef HAVE_extendpdidi2
1043 if (HAVE_extendpdidi2
)
1045 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1048 #endif /* HAVE_extendpdidi2 */
1053 /* Now follow all the conversions between integers
1054 no more than a word long. */
1056 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1057 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1058 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1059 GET_MODE_BITSIZE (from_mode
)))
1061 if (!((GET_CODE (from
) == MEM
1062 && ! MEM_VOLATILE_P (from
)
1063 && direct_load
[(int) to_mode
]
1064 && ! mode_dependent_address_p (XEXP (from
, 0)))
1065 || GET_CODE (from
) == REG
1066 || GET_CODE (from
) == SUBREG
))
1067 from
= force_reg (from_mode
, from
);
1068 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1069 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1070 from
= copy_to_reg (from
);
1071 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1075 /* Handle extension. */
1076 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1078 /* Convert directly if that works. */
1079 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1080 != CODE_FOR_nothing
)
1083 from
= force_not_mem (from
);
1085 emit_unop_insn (code
, to
, from
, equiv_code
);
1090 enum machine_mode intermediate
;
1094 /* Search for a mode to convert via. */
1095 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1096 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1097 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1098 != CODE_FOR_nothing
)
1099 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1100 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1101 GET_MODE_BITSIZE (intermediate
))))
1102 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1103 != CODE_FOR_nothing
))
1105 convert_move (to
, convert_to_mode (intermediate
, from
,
1106 unsignedp
), unsignedp
);
1110 /* No suitable intermediate mode.
1111 Generate what we need with shifts. */
1112 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1113 - GET_MODE_BITSIZE (from_mode
), 0);
1114 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1115 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1117 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1120 emit_move_insn (to
, tmp
);
1125 /* Support special truncate insns for certain modes. */
1127 if (from_mode
== DImode
&& to_mode
== SImode
)
1129 #ifdef HAVE_truncdisi2
1130 if (HAVE_truncdisi2
)
1132 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1136 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1140 if (from_mode
== DImode
&& to_mode
== HImode
)
1142 #ifdef HAVE_truncdihi2
1143 if (HAVE_truncdihi2
)
1145 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1149 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1153 if (from_mode
== DImode
&& to_mode
== QImode
)
1155 #ifdef HAVE_truncdiqi2
1156 if (HAVE_truncdiqi2
)
1158 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1162 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1166 if (from_mode
== SImode
&& to_mode
== HImode
)
1168 #ifdef HAVE_truncsihi2
1169 if (HAVE_truncsihi2
)
1171 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1175 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1179 if (from_mode
== SImode
&& to_mode
== QImode
)
1181 #ifdef HAVE_truncsiqi2
1182 if (HAVE_truncsiqi2
)
1184 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1188 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1192 if (from_mode
== HImode
&& to_mode
== QImode
)
1194 #ifdef HAVE_trunchiqi2
1195 if (HAVE_trunchiqi2
)
1197 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1201 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1205 if (from_mode
== TImode
&& to_mode
== DImode
)
1207 #ifdef HAVE_trunctidi2
1208 if (HAVE_trunctidi2
)
1210 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1214 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1218 if (from_mode
== TImode
&& to_mode
== SImode
)
1220 #ifdef HAVE_trunctisi2
1221 if (HAVE_trunctisi2
)
1223 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1227 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1231 if (from_mode
== TImode
&& to_mode
== HImode
)
1233 #ifdef HAVE_trunctihi2
1234 if (HAVE_trunctihi2
)
1236 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1240 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1244 if (from_mode
== TImode
&& to_mode
== QImode
)
1246 #ifdef HAVE_trunctiqi2
1247 if (HAVE_trunctiqi2
)
1249 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1253 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1257 /* Handle truncation of volatile memrefs, and so on;
1258 the things that couldn't be truncated directly,
1259 and for which there was no special instruction. */
1260 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1262 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1263 emit_move_insn (to
, temp
);
1267 /* Mode combination is not recognized. */
1271 /* Return an rtx for a value that would result
1272 from converting X to mode MODE.
1273 Both X and MODE may be floating, or both integer.
1274 UNSIGNEDP is nonzero if X is an unsigned value.
1275 This can be done by referring to a part of X in place
1276 or by copying to a new temporary with conversion.
1278 This function *must not* call protect_from_queue
1279 except when putting X into an insn (in which case convert_move does it). */
1282 convert_to_mode (mode
, x
, unsignedp
)
1283 enum machine_mode mode
;
1287 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1290 /* Return an rtx for a value that would result
1291 from converting X from mode OLDMODE to mode MODE.
1292 Both modes may be floating, or both integer.
1293 UNSIGNEDP is nonzero if X is an unsigned value.
1295 This can be done by referring to a part of X in place
1296 or by copying to a new temporary with conversion.
1298 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1300 This function *must not* call protect_from_queue
1301 except when putting X into an insn (in which case convert_move does it). */
1304 convert_modes (mode
, oldmode
, x
, unsignedp
)
1305 enum machine_mode mode
, oldmode
;
1311 /* If FROM is a SUBREG that indicates that we have already done at least
1312 the required extension, strip it. */
1314 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1315 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1316 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1317 x
= gen_lowpart (mode
, x
);
1319 if (GET_MODE (x
) != VOIDmode
)
1320 oldmode
= GET_MODE (x
);
1322 if (mode
== oldmode
)
1325 /* There is one case that we must handle specially: If we are converting
1326 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1327 we are to interpret the constant as unsigned, gen_lowpart will do
1328 the wrong if the constant appears negative. What we want to do is
1329 make the high-order word of the constant zero, not all ones. */
1331 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1332 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1333 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1335 HOST_WIDE_INT val
= INTVAL (x
);
1337 if (oldmode
!= VOIDmode
1338 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1340 int width
= GET_MODE_BITSIZE (oldmode
);
1342 /* We need to zero extend VAL. */
1343 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1346 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1349 /* We can do this with a gen_lowpart if both desired and current modes
1350 are integer, and this is either a constant integer, a register, or a
1351 non-volatile MEM. Except for the constant case where MODE is no
1352 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1354 if ((GET_CODE (x
) == CONST_INT
1355 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1356 || (GET_MODE_CLASS (mode
) == MODE_INT
1357 && GET_MODE_CLASS (oldmode
) == MODE_INT
1358 && (GET_CODE (x
) == CONST_DOUBLE
1359 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1360 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1361 && direct_load
[(int) mode
])
1362 || (GET_CODE (x
) == REG
1363 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1364 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1366 /* ?? If we don't know OLDMODE, we have to assume here that
1367 X does not need sign- or zero-extension. This may not be
1368 the case, but it's the best we can do. */
1369 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1370 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1372 HOST_WIDE_INT val
= INTVAL (x
);
1373 int width
= GET_MODE_BITSIZE (oldmode
);
1375 /* We must sign or zero-extend in this case. Start by
1376 zero-extending, then sign extend if we need to. */
1377 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1379 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1380 val
|= (HOST_WIDE_INT
) (-1) << width
;
1382 return gen_int_mode (val
, mode
);
1385 return gen_lowpart (mode
, x
);
1388 temp
= gen_reg_rtx (mode
);
1389 convert_move (temp
, x
, unsignedp
);
1393 /* This macro is used to determine what the largest unit size that
1394 move_by_pieces can use is. */
1396 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1397 move efficiently, as opposed to MOVE_MAX which is the maximum
1398 number of bytes we can move with a single instruction. */
1400 #ifndef MOVE_MAX_PIECES
1401 #define MOVE_MAX_PIECES MOVE_MAX
1404 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1405 store efficiently. Due to internal GCC limitations, this is
1406 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1407 for an immediate constant. */
1409 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1411 /* Generate several move instructions to copy LEN bytes from block FROM to
1412 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1413 and TO through protect_from_queue before calling.
1415 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1416 used to push FROM to the stack.
1418 ALIGN is maximum alignment we can assume. */
1421 move_by_pieces (to
, from
, len
, align
)
1423 unsigned HOST_WIDE_INT len
;
1426 struct move_by_pieces data
;
1427 rtx to_addr
, from_addr
= XEXP (from
, 0);
1428 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1429 enum machine_mode mode
= VOIDmode
, tmode
;
1430 enum insn_code icode
;
1433 data
.from_addr
= from_addr
;
1436 to_addr
= XEXP (to
, 0);
1439 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1440 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1442 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1449 #ifdef STACK_GROWS_DOWNWARD
1455 data
.to_addr
= to_addr
;
1458 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1459 || GET_CODE (from_addr
) == POST_INC
1460 || GET_CODE (from_addr
) == POST_DEC
);
1462 data
.explicit_inc_from
= 0;
1463 data
.explicit_inc_to
= 0;
1464 if (data
.reverse
) data
.offset
= len
;
1467 /* If copying requires more than two move insns,
1468 copy addresses to registers (to make displacements shorter)
1469 and use post-increment if available. */
1470 if (!(data
.autinc_from
&& data
.autinc_to
)
1471 && move_by_pieces_ninsns (len
, align
) > 2)
1473 /* Find the mode of the largest move... */
1474 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1475 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1476 if (GET_MODE_SIZE (tmode
) < max_size
)
1479 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1481 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1482 data
.autinc_from
= 1;
1483 data
.explicit_inc_from
= -1;
1485 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1487 data
.from_addr
= copy_addr_to_reg (from_addr
);
1488 data
.autinc_from
= 1;
1489 data
.explicit_inc_from
= 1;
1491 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1492 data
.from_addr
= copy_addr_to_reg (from_addr
);
1493 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1495 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1497 data
.explicit_inc_to
= -1;
1499 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1501 data
.to_addr
= copy_addr_to_reg (to_addr
);
1503 data
.explicit_inc_to
= 1;
1505 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1506 data
.to_addr
= copy_addr_to_reg (to_addr
);
1509 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1510 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1511 align
= MOVE_MAX
* BITS_PER_UNIT
;
1513 /* First move what we can in the largest integer mode, then go to
1514 successively smaller modes. */
1516 while (max_size
> 1)
1518 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1519 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1520 if (GET_MODE_SIZE (tmode
) < max_size
)
1523 if (mode
== VOIDmode
)
1526 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1527 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1528 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1530 max_size
= GET_MODE_SIZE (mode
);
1533 /* The code above should have handled everything. */
1538 /* Return number of insns required to move L bytes by pieces.
1539 ALIGN (in bits) is maximum alignment we can assume. */
1541 static unsigned HOST_WIDE_INT
1542 move_by_pieces_ninsns (l
, align
)
1543 unsigned HOST_WIDE_INT l
;
1546 unsigned HOST_WIDE_INT n_insns
= 0;
1547 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1549 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1550 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1551 align
= MOVE_MAX
* BITS_PER_UNIT
;
1553 while (max_size
> 1)
1555 enum machine_mode mode
= VOIDmode
, tmode
;
1556 enum insn_code icode
;
1558 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1559 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1560 if (GET_MODE_SIZE (tmode
) < max_size
)
1563 if (mode
== VOIDmode
)
1566 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1567 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1568 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1570 max_size
= GET_MODE_SIZE (mode
);
1578 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1579 with move instructions for mode MODE. GENFUN is the gen_... function
1580 to make a move insn for that mode. DATA has all the other info. */
1583 move_by_pieces_1 (genfun
, mode
, data
)
1584 rtx (*genfun
) PARAMS ((rtx
, ...));
1585 enum machine_mode mode
;
1586 struct move_by_pieces
*data
;
1588 unsigned int size
= GET_MODE_SIZE (mode
);
1589 rtx to1
= NULL_RTX
, from1
;
1591 while (data
->len
>= size
)
1594 data
->offset
-= size
;
1598 if (data
->autinc_to
)
1599 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1602 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1605 if (data
->autinc_from
)
1606 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1609 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1611 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1612 emit_insn (gen_add2_insn (data
->to_addr
,
1613 GEN_INT (-(HOST_WIDE_INT
)size
)));
1614 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1615 emit_insn (gen_add2_insn (data
->from_addr
,
1616 GEN_INT (-(HOST_WIDE_INT
)size
)));
1619 emit_insn ((*genfun
) (to1
, from1
));
1622 #ifdef PUSH_ROUNDING
1623 emit_single_push_insn (mode
, from1
, NULL
);
1629 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1630 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1631 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1632 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1634 if (! data
->reverse
)
1635 data
->offset
+= size
;
1641 /* Emit code to move a block Y to a block X.
1642 This may be done with string-move instructions,
1643 with multiple scalar move instructions, or with a library call.
1645 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1647 SIZE is an rtx that says how long they are.
1648 ALIGN is the maximum alignment we can assume they have.
1650 Return the address of the new block, if memcpy is called and returns it,
1654 emit_block_move (x
, y
, size
)
1659 #ifdef TARGET_MEM_FUNCTIONS
1661 tree call_expr
, arg_list
;
1663 unsigned int align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1665 if (GET_MODE (x
) != BLKmode
)
1668 if (GET_MODE (y
) != BLKmode
)
1671 x
= protect_from_queue (x
, 1);
1672 y
= protect_from_queue (y
, 0);
1673 size
= protect_from_queue (size
, 0);
1675 if (GET_CODE (x
) != MEM
)
1677 if (GET_CODE (y
) != MEM
)
1682 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1683 move_by_pieces (x
, y
, INTVAL (size
), align
);
1686 /* Try the most limited insn first, because there's no point
1687 including more than one in the machine description unless
1688 the more limited one has some advantage. */
1690 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1691 enum machine_mode mode
;
1693 /* Since this is a move insn, we don't care about volatility. */
1696 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1697 mode
= GET_MODE_WIDER_MODE (mode
))
1699 enum insn_code code
= movstr_optab
[(int) mode
];
1700 insn_operand_predicate_fn pred
;
1702 if (code
!= CODE_FOR_nothing
1703 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1704 here because if SIZE is less than the mode mask, as it is
1705 returned by the macro, it will definitely be less than the
1706 actual mode mask. */
1707 && ((GET_CODE (size
) == CONST_INT
1708 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1709 <= (GET_MODE_MASK (mode
) >> 1)))
1710 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1711 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1712 || (*pred
) (x
, BLKmode
))
1713 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1714 || (*pred
) (y
, BLKmode
))
1715 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1716 || (*pred
) (opalign
, VOIDmode
)))
1719 rtx last
= get_last_insn ();
1722 op2
= convert_to_mode (mode
, size
, 1);
1723 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1724 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1725 op2
= copy_to_mode_reg (mode
, op2
);
1727 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1735 delete_insns_since (last
);
1741 /* X, Y, or SIZE may have been passed through protect_from_queue.
1743 It is unsafe to save the value generated by protect_from_queue
1744 and reuse it later. Consider what happens if emit_queue is
1745 called before the return value from protect_from_queue is used.
1747 Expansion of the CALL_EXPR below will call emit_queue before
1748 we are finished emitting RTL for argument setup. So if we are
1749 not careful we could get the wrong value for an argument.
1751 To avoid this problem we go ahead and emit code to copy X, Y &
1752 SIZE into new pseudos. We can then place those new pseudos
1753 into an RTL_EXPR and use them later, even after a call to
1756 Note this is not strictly needed for library calls since they
1757 do not call emit_queue before loading their arguments. However,
1758 we may need to have library calls call emit_queue in the future
1759 since failing to do so could cause problems for targets which
1760 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1761 x
= copy_to_mode_reg (Pmode
, XEXP (x
, 0));
1762 y
= copy_to_mode_reg (Pmode
, XEXP (y
, 0));
1764 #ifdef TARGET_MEM_FUNCTIONS
1765 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
1767 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1768 TREE_UNSIGNED (integer_type_node
));
1769 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
1772 #ifdef TARGET_MEM_FUNCTIONS
1773 /* It is incorrect to use the libcall calling conventions to call
1774 memcpy in this context.
1776 This could be a user call to memcpy and the user may wish to
1777 examine the return value from memcpy.
1779 For targets where libcalls and normal calls have different conventions
1780 for returning pointers, we could end up generating incorrect code.
1782 So instead of using a libcall sequence we build up a suitable
1783 CALL_EXPR and expand the call in the normal fashion. */
1784 if (fn
== NULL_TREE
)
1788 /* This was copied from except.c, I don't know if all this is
1789 necessary in this context or not. */
1790 fn
= get_identifier ("memcpy");
1791 fntype
= build_pointer_type (void_type_node
);
1792 fntype
= build_function_type (fntype
, NULL_TREE
);
1793 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
1794 ggc_add_tree_root (&fn
, 1);
1795 DECL_EXTERNAL (fn
) = 1;
1796 TREE_PUBLIC (fn
) = 1;
1797 DECL_ARTIFICIAL (fn
) = 1;
1798 TREE_NOTHROW (fn
) = 1;
1799 make_decl_rtl (fn
, NULL
);
1800 assemble_external (fn
);
1803 /* We need to make an argument list for the function call.
1805 memcpy has three arguments, the first two are void * addresses and
1806 the last is a size_t byte count for the copy. */
1808 = build_tree_list (NULL_TREE
,
1809 make_tree (build_pointer_type (void_type_node
), x
));
1810 TREE_CHAIN (arg_list
)
1811 = build_tree_list (NULL_TREE
,
1812 make_tree (build_pointer_type (void_type_node
), y
));
1813 TREE_CHAIN (TREE_CHAIN (arg_list
))
1814 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
1815 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
1817 /* Now we have to build up the CALL_EXPR itself. */
1818 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1819 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1820 call_expr
, arg_list
, NULL_TREE
);
1821 TREE_SIDE_EFFECTS (call_expr
) = 1;
1823 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1825 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
1826 VOIDmode
, 3, y
, Pmode
, x
, Pmode
,
1827 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1828 TREE_UNSIGNED (integer_type_node
)),
1829 TYPE_MODE (integer_type_node
));
1832 /* If we are initializing a readonly value, show the above call
1833 clobbered it. Otherwise, a load from it may erroneously be hoisted
1835 if (RTX_UNCHANGING_P (x
))
1836 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
1842 /* Copy all or part of a value X into registers starting at REGNO.
1843 The number of registers to be filled is NREGS. */
1846 move_block_to_reg (regno
, x
, nregs
, mode
)
1850 enum machine_mode mode
;
1853 #ifdef HAVE_load_multiple
1861 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1862 x
= validize_mem (force_const_mem (mode
, x
));
1864 /* See if the machine can do this with a load multiple insn. */
1865 #ifdef HAVE_load_multiple
1866 if (HAVE_load_multiple
)
1868 last
= get_last_insn ();
1869 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1877 delete_insns_since (last
);
1881 for (i
= 0; i
< nregs
; i
++)
1882 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1883 operand_subword_force (x
, i
, mode
));
1886 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1887 The number of registers to be filled is NREGS. SIZE indicates the number
1888 of bytes in the object X. */
1891 move_block_from_reg (regno
, x
, nregs
, size
)
1898 #ifdef HAVE_store_multiple
1902 enum machine_mode mode
;
1907 /* If SIZE is that of a mode no bigger than a word, just use that
1908 mode's store operation. */
1909 if (size
<= UNITS_PER_WORD
1910 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
1911 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
)
1913 emit_move_insn (adjust_address (x
, mode
, 0), gen_rtx_REG (mode
, regno
));
1917 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1918 to the left before storing to memory. Note that the previous test
1919 doesn't handle all cases (e.g. SIZE == 3). */
1920 if (size
< UNITS_PER_WORD
1922 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
)
1924 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1930 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1931 gen_rtx_REG (word_mode
, regno
),
1932 build_int_2 ((UNITS_PER_WORD
- size
)
1933 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1934 emit_move_insn (tem
, shift
);
1938 /* See if the machine can do this with a store multiple insn. */
1939 #ifdef HAVE_store_multiple
1940 if (HAVE_store_multiple
)
1942 last
= get_last_insn ();
1943 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1951 delete_insns_since (last
);
1955 for (i
= 0; i
< nregs
; i
++)
1957 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1962 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1966 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1967 registers represented by a PARALLEL. SSIZE represents the total size of
1968 block SRC in bytes, or -1 if not known. */
1969 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1970 the balance will be in what would be the low-order memory addresses, i.e.
1971 left justified for big endian, right justified for little endian. This
1972 happens to be true for the targets currently using this support. If this
1973 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1977 emit_group_load (dst
, orig_src
, ssize
)
1984 if (GET_CODE (dst
) != PARALLEL
)
1987 /* Check for a NULL entry, used to indicate that the parameter goes
1988 both on the stack and in registers. */
1989 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1994 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1996 /* Process the pieces. */
1997 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1999 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
2000 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
2001 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2004 /* Handle trailing fragments that run over the size of the struct. */
2005 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2007 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2008 bytelen
= ssize
- bytepos
;
2013 /* If we won't be loading directly from memory, protect the real source
2014 from strange tricks we might play; but make sure that the source can
2015 be loaded directly into the destination. */
2017 if (GET_CODE (orig_src
) != MEM
2018 && (!CONSTANT_P (orig_src
)
2019 || (GET_MODE (orig_src
) != mode
2020 && GET_MODE (orig_src
) != VOIDmode
)))
2022 if (GET_MODE (orig_src
) == VOIDmode
)
2023 src
= gen_reg_rtx (mode
);
2025 src
= gen_reg_rtx (GET_MODE (orig_src
));
2027 emit_move_insn (src
, orig_src
);
2030 /* Optimize the access just a bit. */
2031 if (GET_CODE (src
) == MEM
2032 && MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
)
2033 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2034 && bytelen
== GET_MODE_SIZE (mode
))
2036 tmps
[i
] = gen_reg_rtx (mode
);
2037 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2039 else if (GET_CODE (src
) == CONCAT
)
2042 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0))))
2043 || (bytepos
== (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
2044 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 1)))))
2046 tmps
[i
] = XEXP (src
, bytepos
!= 0);
2047 if (! CONSTANT_P (tmps
[i
])
2048 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
2049 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
2050 0, 1, NULL_RTX
, mode
, mode
, ssize
);
2052 else if (bytepos
== 0)
2054 rtx mem
= assign_stack_temp (GET_MODE (src
),
2055 GET_MODE_SIZE (GET_MODE (src
)), 0);
2056 emit_move_insn (mem
, src
);
2057 tmps
[i
] = adjust_address (mem
, mode
, 0);
2062 else if (CONSTANT_P (src
)
2063 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
2066 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2067 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2070 if (BYTES_BIG_ENDIAN
&& shift
)
2071 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2072 tmps
[i
], 0, OPTAB_WIDEN
);
2077 /* Copy the extracted pieces into the proper (probable) hard regs. */
2078 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2079 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
2082 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2083 registers represented by a PARALLEL. SSIZE represents the total size of
2084 block DST, or -1 if not known. */
2087 emit_group_store (orig_dst
, src
, ssize
)
2094 if (GET_CODE (src
) != PARALLEL
)
2097 /* Check for a NULL entry, used to indicate that the parameter goes
2098 both on the stack and in registers. */
2099 if (XEXP (XVECEXP (src
, 0, 0), 0))
2104 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2106 /* Copy the (probable) hard regs into pseudos. */
2107 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2109 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2110 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2111 emit_move_insn (tmps
[i
], reg
);
2115 /* If we won't be storing directly into memory, protect the real destination
2116 from strange tricks we might play. */
2118 if (GET_CODE (dst
) == PARALLEL
)
2122 /* We can get a PARALLEL dst if there is a conditional expression in
2123 a return statement. In that case, the dst and src are the same,
2124 so no action is necessary. */
2125 if (rtx_equal_p (dst
, src
))
2128 /* It is unclear if we can ever reach here, but we may as well handle
2129 it. Allocate a temporary, and split this into a store/load to/from
2132 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2133 emit_group_store (temp
, src
, ssize
);
2134 emit_group_load (dst
, temp
, ssize
);
2137 else if (GET_CODE (dst
) != MEM
&& GET_CODE (dst
) != CONCAT
)
2139 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2140 /* Make life a bit easier for combine. */
2141 emit_move_insn (dst
, const0_rtx
);
2144 /* Process the pieces. */
2145 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2147 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2148 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2149 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2152 /* Handle trailing fragments that run over the size of the struct. */
2153 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2155 if (BYTES_BIG_ENDIAN
)
2157 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2158 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2159 tmps
[i
], 0, OPTAB_WIDEN
);
2161 bytelen
= ssize
- bytepos
;
2164 if (GET_CODE (dst
) == CONCAT
)
2166 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2167 dest
= XEXP (dst
, 0);
2168 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2170 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2171 dest
= XEXP (dst
, 1);
2177 /* Optimize the access just a bit. */
2178 if (GET_CODE (dest
) == MEM
2179 && MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
)
2180 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2181 && bytelen
== GET_MODE_SIZE (mode
))
2182 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2184 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2185 mode
, tmps
[i
], ssize
);
2190 /* Copy from the pseudo into the (probable) hard reg. */
2191 if (GET_CODE (dst
) == REG
)
2192 emit_move_insn (orig_dst
, dst
);
2195 /* Generate code to copy a BLKmode object of TYPE out of a
2196 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2197 is null, a stack temporary is created. TGTBLK is returned.
2199 The primary purpose of this routine is to handle functions
2200 that return BLKmode structures in registers. Some machines
2201 (the PA for example) want to return all small structures
2202 in registers regardless of the structure's alignment. */
2205 copy_blkmode_from_reg (tgtblk
, srcreg
, type
)
2210 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2211 rtx src
= NULL
, dst
= NULL
;
2212 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2213 unsigned HOST_WIDE_INT bitpos
, xbitpos
, big_endian_correction
= 0;
2217 tgtblk
= assign_temp (build_qualified_type (type
,
2219 | TYPE_QUAL_CONST
)),
2221 preserve_temp_slots (tgtblk
);
  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
        srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
        srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }
  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     BLKmode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
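
/* [Editorial sketch, not part of GCC.]  The big_endian_correction above in
   concrete numbers: with 32-bit words (BITS_PER_WORD == 32, UNITS_PER_WORD
   == 4), a 5-byte structure occupies one full word plus one byte of a
   second word.  On a big-endian machine that byte sits in the high-order
   end, so the copy loop starts reading the second word at bit
   32 - (5 % 4) * 8 = 24.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned bytes = 5;                 /* hypothetical struct size */
  unsigned units_per_word = 4, bits_per_word = 32, bits_per_unit = 8;
  unsigned correction = 0;

  if (bytes % units_per_word)
    correction = bits_per_word - (bytes % units_per_word) * bits_per_unit;

  printf ("big_endian_correction = %u\n", correction);   /* prints 24 */
  return 0;
}
#endif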
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
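
/* [Editorial sketch, not part of GCC.]  The CONSTFUN protocol in plain C:
   the callback receives an opaque pointer, a byte offset, and a width, and
   must produce the constant to store at that offset.  The helper below is
   hypothetical; it mimics reading a chunk of a string constant, which is
   the kind of generator can_store_by_pieces probes before committing.  */
#if 0
#include <stdio.h>
#include <string.h>

/* Return the integer whose bytes are data[offset .. offset + size - 1].  */
static unsigned long long
example_read_str (const char *data, long offset, unsigned size)
{
  unsigned long long value = 0;

  memcpy (&value, data + offset, size);   /* host byte order, for brevity */
  return value;
}

int
main (void)
{
  const char *msg = "hello, world";

  printf ("%#llx\n", example_read_str (msg, 0, 4));   /* first 4 bytes */
  return 0;
}
#endif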
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();

  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
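
/* [Editorial sketch, not part of GCC.]  The strategy shared by
   store_by_pieces and clear_by_pieces, emulated with host integer types:
   fill a buffer using the widest stores the remaining length allows, then
   fall back to narrower ones.  The chunk sizes stand in for machine modes
   and memcpy stands in for the move insn of that width.  */
#if 0
#include <stddef.h>
#include <stdint.h>
#include <string.h>

static void
fill_by_pieces (unsigned char *to, size_t len, unsigned char byte)
{
  size_t sizes[] = { sizeof (uint64_t), sizeof (uint32_t),
                     sizeof (uint16_t), sizeof (uint8_t) };
  size_t i;

  for (i = 0; i < sizeof (sizes) / sizeof (sizes[0]); i++)
    while (len >= sizes[i])
      {
        uint64_t cst = 0;

        memset (&cst, byte, sizes[i]);   /* the "constfun" result */
        memcpy (to, &cst, sizes[i]);     /* one move insn of this width */
        to += sizes[i];
        len -= sizes[i];
      }
}

int
main (void)
{
  unsigned char buf[13];

  fill_by_pieces (buf, sizeof buf, 0xAB);  /* one 8-, one 4-, one 1-byte store */
  return 0;
}
#endif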
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.to = to;
  data.len = len;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */
static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if (!data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */
rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && MOVE_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else
        {
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
          rtx opalign = GEN_INT (align / BITS_PER_UNIT);
          enum machine_mode mode;

          for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
               mode = GET_MODE_WIDER_MODE (mode))
            {
              enum insn_code code = clrstr_optab[(int) mode];
              insn_operand_predicate_fn pred;

              if (code != CODE_FOR_nothing
                  /* We don't need MODE to be narrower than
                     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
                     the mode mask, as it is returned by the macro, it will
                     definitely be less than the actual mode mask.  */
                  && ((GET_CODE (size) == CONST_INT
                       && ((unsigned HOST_WIDE_INT) INTVAL (size)
                           <= (GET_MODE_MASK (mode) >> 1)))
                      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                      || (*pred) (object, BLKmode))
                  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
                      || (*pred) (opalign, VOIDmode)))
                {
                  rtx op1;
                  rtx last = get_last_insn ();
                  rtx pat;

                  op1 = convert_to_mode (mode, size, 1);
                  pred = insn_data[(int) code].operand[1].predicate;
                  if (pred != 0 && ! (*pred) (op1, mode))
                    op1 = copy_to_mode_reg (mode, op1);

                  pat = GEN_FCN ((int) code) (object, op1, opalign);
                  if (pat)
                    {
                      emit_insn (pat);
                      return 0;
                    }
                  else
                    delete_insns_since (last);
                }
            }
          /* OBJECT or SIZE may have been passed through protect_from_queue.

             It is unsafe to save the value generated by protect_from_queue
             and reuse it later.  Consider what happens if emit_queue is
             called before the return value from protect_from_queue is used.

             Expansion of the CALL_EXPR below will call emit_queue before
             we are finished emitting RTL for argument setup.  So if we are
             not careful we could get the wrong value for an argument.

             To avoid this problem we go ahead and emit code to copy OBJECT
             and SIZE into new pseudos.  We can then place those new pseudos
             into an RTL_EXPR and use them later, even after a call to
             emit_queue.

             Note this is not strictly needed for library calls since they
             do not call emit_queue before loading their arguments.  However,
             we may need to have library calls call emit_queue in the future
             since failing to do so could cause problems for targets which
             define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

          object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
          size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
          size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                                  TREE_UNSIGNED (integer_type_node));
          size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
#ifdef TARGET_MEM_FUNCTIONS
          /* It is incorrect to use the libcall calling conventions to call
             memset in this context.

             This could be a user call to memset and the user may wish to
             examine the return value from memset.

             For targets where libcalls and normal calls have different
             conventions for returning pointers, we could end up generating
             incorrect code.

             So instead of using a libcall sequence we build up a suitable
             CALL_EXPR and expand the call in the normal fashion.  */
          if (fn == NULL_TREE)
            {
              tree fntype;

              /* This was copied from except.c, I don't know if all this is
                 necessary in this context or not.  */
              fn = get_identifier ("memset");
              fntype = build_pointer_type (void_type_node);
              fntype = build_function_type (fntype, NULL_TREE);
              fn = build_decl (FUNCTION_DECL, fn, fntype);
              ggc_add_tree_root (&fn, 1);
              DECL_EXTERNAL (fn) = 1;
              TREE_PUBLIC (fn) = 1;
              DECL_ARTIFICIAL (fn) = 1;
              TREE_NOTHROW (fn) = 1;
              make_decl_rtl (fn, NULL);
              assemble_external (fn);
            }

          /* We need to make an argument list for the function call.

             memset has three arguments, the first is a void * address, the
             second an integer with the initialization value, the last is a
             size_t byte count for the copy.  */
          arg_list
            = build_tree_list (NULL_TREE,
                               make_tree (build_pointer_type (void_type_node),
                                          object));
          TREE_CHAIN (arg_list)
            = build_tree_list (NULL_TREE,
                               make_tree (integer_type_node, const0_rtx));
          TREE_CHAIN (TREE_CHAIN (arg_list))
            = build_tree_list (NULL_TREE, make_tree (sizetype, size));
          TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

          /* Now we have to build up the CALL_EXPR itself.  */
          call_expr = build1 (ADDR_EXPR,
                              build_pointer_type (TREE_TYPE (fn)), fn);
          call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                             call_expr, arg_list, NULL_TREE);
          TREE_SIDE_EFFECTS (call_expr) = 1;

          retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
          emit_library_call (bzero_libfunc, LCT_NORMAL,
                             VOIDmode, 2, object, Pmode, size,
                             TYPE_MODE (integer_type_node));
#endif

          /* If we are initializing a readonly value, show the above call
             clobbered it.  Otherwise, a load from it may erroneously be
             hoisted from a loop.  */
          if (RTX_UNCHANGING_P (object))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
        }
    }

  return retval;
}
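
/* [Editorial sketch, not part of GCC.]  Why clear_storage builds a real
   CALL_EXPR instead of a libcall sequence: the emitted RTL must look like
   an ordinary call to memset, whose return value a user is allowed to
   inspect.  In source terms the generated code behaves like the line
   below, return value included.  */
#if 0
#include <string.h>

void
clear_object (void *object, size_t size)
{
  void *ret = memset (object, 0, size);   /* return value must be usable */
  (void) ret;
}
#endif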
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
          && FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y_cst = y;
          y = force_const_mem (mode, y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */
rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
                                                    * BITS_PER_UNIT),
                                                   (class == MODE_COMPLEX_INT
                                                    ? MODE_INT : MODE_FLOAT),
                                                   0))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
      if (stack
          && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
              != GET_MODE_SIZE (submode)))
        {
          rtx temp;
          HOST_WIDE_INT offset1, offset2;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT (PUSH_ROUNDING
                                        (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
          offset1 = 0;
          offset2 = GET_MODE_SIZE (submode);
#else
          offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
          offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
                     + GET_MODE_SIZE (submode));
#endif

          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset1))),
                          gen_realpart (submode, y));
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset2))),
                          gen_imagpart (submode, y));
        }
      else
#endif
      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          rtx realpart_x, realpart_y;
          rtx imagpart_x, imagpart_y;

          /* If this is a complex value with each part being smaller than a
             word, the usual calling sequence will likely pack the pieces into
             a single register.  Unfortunately, SUBREG of hard registers only
             deals in terms of words, so we have a problem converting input
             arguments to the CONCAT of two registers that is used elsewhere
             for complex values.  If this is before reload, we can copy it into
             memory and reload.  FIXME, we should see about using extract and
             insert on integer registers, but complex short and complex char
             variables should be rarely used.  */
          if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
              && (reload_in_progress | reload_completed) == 0)
            {
              int packed_dest_p
                = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
              int packed_src_p
                = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

              if (packed_dest_p || packed_src_p)
                {
                  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                               ? MODE_FLOAT : MODE_INT);

                  enum machine_mode reg_mode
                    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

                  if (reg_mode != BLKmode)
                    {
                      rtx mem = assign_stack_temp (reg_mode,
                                                   GET_MODE_SIZE (mode), 0);
                      rtx cmem = adjust_address (mem, mode, 0);

                      cfun->cannot_inline
                        = N_("function using short complex types cannot be inline");

                      if (packed_dest_p)
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

                          emit_move_insn_1 (cmem, y);
                          return emit_move_insn_1 (sreg, mem);
                        }
                      else
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

                          emit_move_insn_1 (mem, sreg);
                          return emit_move_insn_1 (x, cmem);
                        }
                    }
                }
            }

          realpart_x = gen_realpart (submode, x);
          realpart_y = gen_realpart (submode, y);
          imagpart_x = gen_imagpart (submode, x);
          imagpart_y = gen_imagpart (submode, y);

          /* Show the output dies here.  This is necessary for SUBREGs
             of pseudos since we cannot track their lifetimes correctly;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed)
              && (GET_CODE (realpart_x) == SUBREG
                  || GET_CODE (imagpart_x) == SUBREG))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (realpart_x, realpart_y));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (imagpart_x, imagpart_y));
        }

      return get_last_insn ();
    }
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          rtx temp;
          enum rtx_code code;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT (PUSH_ROUNDING
                                        (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

          code = GET_CODE (XEXP (x, 0));

          /* Just hope that small offsets off SP are OK.  */
          if (code == POST_INC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (-((HOST_WIDE_INT)
                                            GET_MODE_SIZE (GET_MODE (x)))));
          else if (code == POST_DEC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          else
            temp = stack_pointer_rtx;

          x = change_address (x, VOIDmode, temp);
        }
#endif

      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = gen_sequence ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */
static rtx
compress_float_constant (x, y)
     rtx x, y;
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
        REG_NOTES (last_insn)
          = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));

      return last_insn;
    }

  return NULL_RTX;
}
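
/* [Editorial sketch, not part of GCC.]  The "narrowed value isn't exact"
   test above, in host arithmetic: a double constant can be loaded via a
   float-to-double extension only when rounding to float loses nothing.
   1.0 passes the test; 0.1 does not.  */
#if 0
#include <stdio.h>

static int
exactly_representable_as_float (double d)
{
  return (double) (float) d == d;
}

int
main (void)
{
  printf ("%d %d\n",
          exactly_representable_as_float (1.0),    /* 1: extend is safe */
          exactly_representable_as_float (0.1));   /* 0: keep wide constant */
  return 0;
}
#endif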
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
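
/* [Editorial sketch, not part of GCC.]  What push_block computes on a
   downward-growing stack, in plain numbers: after the stack pointer is
   dropped by SIZE (+ EXTRA), the block begins SIZE bytes below the old
   outgoing-args pointer, with the padding placed according to BELOW.  The
   addresses are hypothetical.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long outgoing_args = 0x1000;   /* stand-in for virtual_outgoing_args */
  long size = 32, extra = 8;
  int below = 0;

  /* Mirrors: plus_constant (virtual_outgoing_args_rtx,
                             -INTVAL (size) - (below ? 0 : extra))  */
  long block = outgoing_args - size - (below ? 0 : extra);
  printf ("block starts at %#lx\n", block);   /* 0x1000 - 40 = 0xfd8 */
  return 0;
}
#endif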
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
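
/* [Editorial sketch, not part of GCC.]  The two address shapes built above
   for the fallback MEM push, shown as the equivalent pointer arithmetic on
   a downward-growing stack: when the mode's size equals the rounded push
   size a simple pre-decrement suffices; otherwise the stack pointer must
   move by the rounded amount while the store lands at the new top.  The
   sizes are hypothetical.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long sp = 0x1000;
  long mode_size = 2, rounded_size = 4;   /* pushes round up to 4 bytes */

  if (mode_size == rounded_size)
    sp -= mode_size;                      /* (pre_dec sp) */
  else
    sp -= rounded_size;                   /* (pre_modify sp (sp - 4)) */

  printf ("store %ld bytes at %#lx\n", mode_size, sp);
  return 0;
}
#endif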
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space,
                alignment_pad)
     rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));
          target = gen_rtx_MEM (BLKmode, temp);

          if (type != 0)
            {
              set_mem_attributes (target, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (target, 0);
            }
          else
            set_mem_align (target, align);

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
            {
              move_by_pieces (target, xinner, INTVAL (size), align);
              goto ret;
            }
          else
            {
              rtx opalign = GEN_INT (align / BITS_PER_UNIT);
              enum machine_mode mode;

              for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                {
                  enum insn_code code = movstr_optab[(int) mode];
                  insn_operand_predicate_fn pred;

                  if (code != CODE_FOR_nothing
                      && ((GET_CODE (size) == CONST_INT
                           && ((unsigned HOST_WIDE_INT) INTVAL (size)
                               <= (GET_MODE_MASK (mode) >> 1)))
                          || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                      && (!(pred = insn_data[(int) code].operand[0].predicate)
                          || ((*pred) (target, BLKmode)))
                      && (!(pred = insn_data[(int) code].operand[1].predicate)
                          || ((*pred) (xinner, BLKmode)))
                      && (!(pred = insn_data[(int) code].operand[3].predicate)
                          || ((*pred) (opalign, VOIDmode))))
                    {
                      rtx op2 = convert_to_mode (mode, size, 1);
                      rtx last = get_last_insn ();
                      rtx pat;

                      pred = insn_data[(int) code].operand[2].predicate;
                      if (pred != 0 && ! (*pred) (op2, mode))
                        op2 = copy_to_mode_reg (mode, op2);

                      pat = GEN_FCN ((int) code) (target, xinner,
                                                  op2, opalign);
                      if (pat)
                        {
                          emit_insn (pat);
                          goto ret;
                        }
                      else
                        delete_insns_since (last);
                    }
                }
            }
          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, LCT_NORMAL,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             convert_to_mode (TYPE_MODE (sizetype),
                                              size, TREE_UNSIGNED (sizetype)),
                             TYPE_MODE (sizetype));
#else
          emit_library_call (bcopy_libfunc, LCT_NORMAL,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             convert_to_mode (TYPE_MODE (integer_type_node),
                                              size,
                                              TREE_UNSIGNED (integer_type_node)),
                             TYPE_MODE (integer_type_node));
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          target = addr;
          dest = gen_rtx_MEM (mode, addr);
          if (type != 0)
            {
              set_mem_attributes (dest, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (dest, 0);
            }

          emit_move_insn (dest, x);
        }
    }
 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, -1);  /* ??? size? */
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
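
/* [Editorial sketch, not part of GCC.]  The PARTIAL bookkeeping above in
   concrete numbers: with 4-byte words and a hypothetical 8-byte parameter
   boundary, an argument with PARTIAL == 3 has 12 bytes taken by registers,
   but the register-covered amount is rounded down to the boundary, leaving
   OFFSET bytes that still get stack space.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int partial = 3;                       /* words passed in registers */
  int units_per_word = 4;
  int parm_boundary_bytes = 8;

  int used = partial * units_per_word;   /* bytes covered by registers */
  int offset = used % parm_boundary_bytes;

  printf ("used = %d, offset = %d, rounded used = %d\n",
          used, offset, used - offset);  /* used = 12, offset = 4 -> 8 */
  return 0;
}
#endif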
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */
rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

          if (GET_CODE (to_rtx) != MEM)
            abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (GET_CODE (to_rtx) == MEM
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_type (TREE_TYPE (to),
                                                                 offset));
        }

      if (GET_CODE (to_rtx) == MEM)
        {
          tree old_expr = MEM_EXPR (to_rtx);

          /* If the field is at offset zero, we could have been given the
             DECL_RTX of the parent struct.  Don't munge it.  */
          to_rtx = shallow_copy_rtx (to_rtx);

          set_mem_attributes (to_rtx, to, 0);

          /* If we changed MEM_EXPR, that means we're now referencing
             the COMPONENT_REF, which means that MEM_OFFSET must be
             relative to that field.  But we've not yet reflected BITPOS
             in TO_RTX.  This will be done in store_field.  Adjust for
             that by biasing MEM_OFFSET by -bitpos.  */
          if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
              && (bitpos / BITS_PER_UNIT) != 0)
            set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
                                             - (bitpos / BITS_PER_UNIT)));
        }

      /* Deal with volatile and readonly fields.  The former is only done
         for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && GET_CODE (to_rtx) == MEM)
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_VOLATILE_P (to_rtx) = 1;
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1)))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast for HPUX compiler.  */
                             ? ((enum machine_mode)
                                TYPE_MODE (TREE_TYPE (to)))
                             : VOIDmode),
                            unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from));
      else
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (POINTER_TYPE_P (TREE_TYPE (to))
              && GET_MODE (to_rtx) != GET_MODE (value))
            value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memmove_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                         XEXP (to_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node),
                                          size, TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be true?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */
rtx
store_expr (exp, target, want_value)
     tree exp;
     rtx target;
     int want_value;
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target), 0);
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
        dont_return_target = 1;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        {
          /* If TEMP is already in the desired TARGET, only copy it from
             memory and don't store it there again.  */
          if (temp == target
              || (rtx_equal_p (temp, target)
                  && ! side_effects_p (temp) && ! side_effects_p (target)))
            dont_store_target = 1;
          temp = copy_to_reg (temp);
        }
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0)
        {
          if (TREE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp = convert
              ((*lang_hooks.types.signed_or_unsigned_type)
               (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

          exp = convert ((*lang_hooks.types.type_for_mode)
                         (GET_MODE (SUBREG_REG (target)),
                          SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
         the access now so it gets done only once.  Likewise if
         it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
          && (MEM_VOLATILE_P (temp)
              || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
         target.  Otherwise, the caller might get confused by a result whose
         mode is larger than expected.  */

      if (want_value && GET_MODE (temp) != GET_MODE (target))
        {
          if (GET_MODE (temp) != VOIDmode)
            {
              temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (temp,
                                            SUBREG_PROMOTED_UNSIGNED_P (target));
            }
          else
            temp = convert_modes (GET_MODE (target),
                                  GET_MODE (SUBREG_REG (target)),
                                  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
	 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	    but TARGET is not valid memory reference, TEMP will differ
	    from TARGET although it is really the same location.  */
      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
	  || target != DECL_RTL_IF_SET (exp)))
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
	      emit_block_move (target, temp, copy_size_rtx);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_memory_address (Pmode,
							    copy_size_rtx);
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp));
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
	   elt = TREE_CHAIN (elt))
	if (!is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
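/* Illustrative note (derived from the switch above): is_zeros_p looks
   through value-preserving wrappers, so for example (int) 0, the complex
   constant 0+0i, and a VECTOR_CST or CONSTRUCTOR whose elements are all
   themselves zero each count as "just zeros".  */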
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
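/* Worked example of the 3/4 threshold above: a CONSTRUCTOR with elements
   {0, 0, 0, 5} gives zeros == 3 and elts == 4, and 4 * 3 >= 3 * 4 holds,
   so it is reported as mostly zero; with {0, 0, 5, 5} we get
   4 * 2 < 3 * 4, so it is not.  */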
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
			 alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (GET_CODE (target) == MEM)
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		 alias_set);
}
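/* Illustrative note: the shortcut above fires, for instance, when a nested
   CONSTRUCTOR initializes a struct member that starts on a byte boundary of
   a MEM target -- store_constructor is entered directly and can inherit
   CLEARED -- whereas a bit-field or a nonzero position within a register
   destination falls back to store_field.  */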
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (exp, target, cleared, size)
     tree exp;
     rtx target;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* We either clear the aggregate or indicate the value is dead.  */
      if ((TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! cleared
	  && ! CONSTRUCTOR_ELTS (exp))
	/* If the constructor is empty, clear the union.  */
	{
	  clear_storage (target, expr_size (exp));
	  cleared = 1;
	}

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  Don't do this if TARGET is a
	 register whose mode size isn't equal to SIZE since clear_storage
	 can't handle this case.  */
      else if (! cleared && size > 0
	       && ((list_length (CONSTRUCTOR_ELTS (exp))
		    != fields_length (type))
		   || mostly_zeros_p (exp))
	       && (GET_CODE (target) != REG
		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
	  clear_storage (target, GEN_INT (size));
	  cleared = 1;
	}

      if (! cleared)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  tree field = TREE_PURPOSE (elt);
	  tree value = TREE_VALUE (elt);
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos = 0;
	  int unsignedp;
	  tree offset;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (value))
	    continue;

	  if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;

	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  offset = DECL_FIELD_OFFSET (field);
	  if (host_integerp (offset, 0)
	      && host_integerp (bit_position (field), 0))
	    {
	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, make_tree (TREE_TYPE (exp), target));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (offset_rtx) != Pmode)
		offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
	      if (GET_MODE (offset_rtx) != ptr_mode)
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	      to_rtx = offset_address (to_rtx, offset_rtx,
				       highest_pow2_factor (offset));
	    }

	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

#ifdef WORD_REGISTER_OPERATIONS
	  /* If this initializes a field that is smaller than a word, at the
	     start of a word, try to widen it to a full word.
	     This special case allows us to output C++ member function
	     initializations in a form that the optimizers can understand.  */
	  if (GET_CODE (target) == REG
	      && bitsize < BITS_PER_WORD
	      && bitpos % BITS_PER_WORD == 0
	      && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);

	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
		  type = (*lang_hooks.types.type_for_size)
		    (BITS_PER_WORD, TREE_UNSIGNED (type));
		  value = convert (type, value);
		}

	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif

	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
	      && DECL_NONADDRESSABLE_P (field))
	    {
	      to_rtx = copy_rtx (to_rtx);
	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
				   value, type, cleared,
				   get_alias_set (TREE_TYPE (field)));
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE
	   || TREE_CODE (type) == VECTOR_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      tree elttype = TREE_TYPE (type);
      int const_bounds_p;
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;

      /* Vectors are like arrays, but the domain is stored via an array
	 type indirectly.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	{
	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
	     the same field as TYPE_DOMAIN, we are not guaranteed that
	     it always will.  */
	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
	}

      const_bounds_p = (TYPE_MIN_VALUE (domain)
			&& TYPE_MAX_VALUE (domain)
			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
			&& host_integerp (TYPE_MAX_VALUE (domain), 0));

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
	{
	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	}

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is
	 a static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = ! const_bounds_p;

	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE && ! need_to_clear;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;

	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);

		  if (! host_integerp (lo_index, 1)
		      || ! host_integerp (hi_index, 1))
		    {
		      need_to_clear = 1;
		      break;
		    }

		  this_node_count = (tree_low_cst (hi_index, 1)
				     - tree_low_cst (lo_index, 1) + 1);
		}
	      else
		this_node_count = 1;

	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }

	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
	}

      if (need_to_clear && size > 0)
	{
	  if (! cleared)
	    {
	      if (REG_P (target))
		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	      else
		clear_storage (target, GEN_INT (size));
	    }
	  cleared = 1;
	}
      else if (REG_P (target))
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  unsignedp = TREE_UNSIGNED (elttype);
	  mode = TYPE_MODE (elttype);
	  if (mode == BLKmode)
	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
		       : -1);
	  else
	    bitsize = GET_MODE_BITSIZE (mode);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (const_bounds_p
		  && host_integerp (lo_index, 0)
		  && host_integerp (hi_index, 0)
		  && (lo = tree_low_cst (lo_index, 0),
		      hi = tree_low_cst (hi_index, 0),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (host_integerp (TYPE_SIZE (elttype), 1)
			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
			       <= 40 * 8)))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

		      if (GET_CODE (target) == MEM
			  && !MEM_KEEP_ALIAS_SET_P (target)
			  && TREE_CODE (type) == ARRAY_TYPE
			  && TYPE_NONALIASED_COMPONENT (type))
			{
			  target = copy_rtx (target);
			  MEM_KEEP_ALIAS_SET_P (target) = 1;
			}

		      store_constructor_field
			(target, bitsize, bitpos, mode, value, type, cleared,
			 get_alias_set (elttype));
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));
		  SET_DECL_RTL (index, index_r);
		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position
		    = convert (ssizetype,
			       fold (build (MINUS_EXPR, TREE_TYPE (index),
					    index, TYPE_MIN_VALUE (domain))));
		  position = size_binop (MULT_EXPR, position,
					 convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  xtarget = offset_address (target, pos_rtx,
					    highest_pow2_factor (position));
		  xtarget = adjust_address (xtarget, mode, 0);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared,
				       bitsize / BITS_PER_UNIT);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);
		}
	    }
	  else if ((index != 0 && ! host_integerp (index, 0))
		   || ! host_integerp (TYPE_SIZE (elttype), 1))
	    {
	      tree position;

	      if (index == 0)
		index = ssize_int (1);

	      if (minelt)
		index = convert (ssizetype,
				 fold (build (MINUS_EXPR, index,
					      TYPE_MIN_VALUE (domain))));

	      position = size_binop (MULT_EXPR, index,
				     convert (ssizetype,
					      TYPE_SIZE_UNIT (elttype)));
	      xtarget = offset_address (target,
					expand_expr (position, 0, VOIDmode, 0),
					highest_pow2_factor (position));
	      xtarget = adjust_address (xtarget, mode, 0);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((tree_low_cst (index, 0) - minelt)
			  * tree_low_cst (TYPE_SIZE (elttype), 1));
	      else
		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
		  && TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
		{
		  target = copy_rtx (target);
		  MEM_KEEP_ALIAS_SET_P (target) = 1;
		}

	      store_constructor_field (target, bitsize, bitpos, mode, value,
				       type, cleared, get_alias_set (elttype));
	    }
	}
    }
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset), and then
	 set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
	{
	  if (! cleared)
	    clear_storage (target, GEN_INT (size));
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_diffop (domain_max, domain_min),
			      ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  unsigned int bit_pos = 0;
	  unsigned int ibit = 0;
	  unsigned int offset = 0;  /* In bytes from beginning of set.  */

	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}

	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;

		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			to_rtx = adjust_address (target, mode, offset);
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (! cleared)
	/* Don't bother clearing storage if the set is all ones.  */
	if (TREE_CHAIN (elt) != NULL_TREE
	    || (TREE_PURPOSE (elt) == NULL_TREE
		? nbits != 1
		: ( ! host_integerp (TREE_VALUE (elt), 0)
		   || ! host_integerp (TREE_PURPOSE (elt), 0)
		   || (tree_low_cst (TREE_VALUE (elt), 0)
		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
		       != (HOST_WIDE_INT) nbits))))
	  clear_storage (target, expr_size (exp));

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* Start of range of element or NULL.  */
	  tree startbit = TREE_PURPOSE (elt);
	  /* End of range of element, or element value.  */
	  tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
	  HOST_WIDE_INT startb, endb;
#endif
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* Handle non-range tuple element like [ expr ].  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }

	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx
		= assign_temp
		  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
					  (GET_MODE (target), 0),
					  TYPE_QUAL_CONST)),
		   0, 1, 1);
	      emit_move_insn (targetx, target);
	    }

	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, LCT_NORMAL,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
			       startbit_rtx, TYPE_MODE (sizetype),
			       endbit_rtx, TYPE_MODE (sizetype));

	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
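/* Illustrative note on the clearing strategy above: initializing, say, a
   100-element array from a constructor that supplies only a handful of
   nonzero elements clears the whole object once and then stores just the
   nonzero elements (the CLEARED flag makes the element loop skip values
   that are already zero), instead of emitting one store per element.  */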
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
	     alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     tree type;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object
	= assign_temp
	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
	   0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
	abort ();
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
	      || bitpos % GET_MODE_ALIGNMENT (mode)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT));

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp,
		       int_size_in_bytes (type));

      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.
	     If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;

	      if (unsignedp)
		return expand_and (tmode, temp,
				   gen_int_mode (width_mask, tmode),
				   NULL_RTX);

	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, VOIDmode,
				    int_size_in_bytes (type));
	}

      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));

      /* Now build a reference to just the desired component.  */

      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
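/* Illustrative note on the two paths above: storing to, say, a 3-bit field
   of a struct in memory arrives here with MODE == VOIDmode and goes through
   store_bit_field, while a naturally aligned SImode member takes the else
   branch and is stored with a plain adjust_address plus store_expr on the
   narrowed MEM.  */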
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree placeholder_ptr = 0;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (exp, 1);
	  tree this_offset = DECL_FIELD_OFFSET (field);

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (this_offset == 0)
	    break;
	  else if (! TREE_CONSTANT (this_offset)
		   && contains_placeholder_p (this_offset))
	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);

	  offset = size_binop (PLUS_EXPR, offset, this_offset);
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   DECL_FIELD_BIT_OFFSET (field));

	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	}

      else if (TREE_CODE (exp) == ARRAY_REF
	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
	{
	  tree index = TREE_OPERAND (exp, 1);
	  tree array = TREE_OPERAND (exp, 0);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

	  /* We assume all arrays have sizes that are a multiple of a byte.
	     First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     array element.  */
	  if (low_bound != 0 && ! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				 index, low_bound));

	  /* If the index has a self-referential type, pass it to a
	     WITH_RECORD_EXPR; if the component size is, pass our
	     component to one.  */
	  if (! TREE_CONSTANT (index)
	      && contains_placeholder_p (index))
	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
	  if (! TREE_CONSTANT (unit_size)
	      && contains_placeholder_p (unit_size))
	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);

	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (MULT_EXPR,
					   convert (sizetype, index),
					   unit_size));
	}

      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
	{
	  tree new = find_placeholder (exp, &placeholder_ptr);

	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
	     We might have been called from tree optimization where we
	     haven't set up an object yet.  */
	  if (new == 0)
	    break;
	  else
	    exp = new;

	  continue;
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && TREE_CODE (exp) != VIEW_CONVERT_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
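/* Worked example: for a COMPONENT_REF b.f where F is a non-bit-field int
   placed at a constant position 4 bytes into B, this returns B with
   *PBITPOS == 32 (4 bytes at 8 bits per unit), *PBITSIZE the field width
   in bits, *POFFSET == 0, and *PMODE the mode of F.  */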
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
		    code == ZERO_EXTEND);
      return target;
    }

  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (GET_RTX_CLASS (code) == '1')
    {
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references
     to be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
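/* Worked example: force_operand on (plus (reg A) (mult (reg B)
   (const_int 4))) first recurses on the MULT operand, emitting the multiply
   through expand_mult into a pseudo, and then emits the add through
   expand_simple_binop, so the caller sees only a REG holding the sum.  */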
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (GET_CODE (x) == MEM
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */

  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL and that address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || GET_CODE (DECL_RTL (exp)) != MEM)
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || GET_CODE (x) == MEM)
	    return 0;
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* If we've already scanned this, don't do it again.  Otherwise,
	     show we've scanned it and record for clearing the flag if we're
	     going on.  */
	  if (TREE_PRIVATE (exp))
	    return 1;

	  TREE_PRIVATE (exp) = 1;
	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    {
	      TREE_PRIVATE (exp) = 0;
	      return 0;
	    }

	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !(*lang_hooks.safe_from_p) (x, exp))
	return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
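/* Note that safe_from_p errs on the side of returning 0: a zero result only
   costs the caller a temporary.  For example, any CALL_EXPR is
   pessimistically assumed to clobber every hard register and all of
   memory.  */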
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR: case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
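/* Worked example: highest_pow2_factor ((i + 4) * 8) == 8.  The variable i
   contributes only the default factor 1, the addition takes
   MIN (1, 4) == 1, and the multiplication multiplies the factors
   (1 * 8), so the whole expression is known to be a multiple of 8.  */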
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
  HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
	       || TREE_CODE (elt) == COND_EXPR)
	      ? TREE_OPERAND (elt, 1)
	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
	      ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
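/* Usage note: callers that may need several replacements walk the list with
   a cursor; get_inner_reference above, for instance, passes a local
   PLACEHOLDER_PTR so that repeated lookups resume after the entry already
   found rather than rescanning from the head of placeholder_list.  */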
6125 /* expand_expr: generate code for computing expression EXP.
6126 An rtx for the computed value is returned. The value is never null.
6127 In the case of a void EXP, const0_rtx is returned.
6129 The value may be stored in TARGET if TARGET is nonzero.
6130 TARGET is just a suggestion; callers must assume that
6131 the rtx returned may not be the same as TARGET.
6133 If TARGET is CONST0_RTX, it means that the value will be ignored.
6135 If TMODE is not VOIDmode, it suggests generating the
6136 result in mode TMODE. But this is done only when convenient.
6137 Otherwise, TMODE is ignored and the value generated in its natural mode.
6138 TMODE is just a suggestion; callers must assume that
6139 the rtx returned may not have mode TMODE.
6141 Note that TARGET may have neither TMODE nor MODE. In that case, it
6142 probably will not be used.
6144 If MODIFIER is EXPAND_SUM then when EXP is an addition
6145 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6146 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6147 products as above, or REG or MEM, or constant.
6148 Ordinarily in such cases we would output mul or add instructions
6149 and then return a pseudo reg containing the sum.
6151 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6152 it also marks a label as absolutely required (it can't be dead).
6153 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6154 This is used for outputting expressions used in initializers.
6156 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6157 with a constant address even if that address is not normally legitimate.
6158 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  rtx op0, op1, temp;
  int ignore;
  tree context;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    p->expr->x_forced_labels
	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
				   p->expr->x_forced_labels);
	  }
	else
	  {
	    if (modifier == EXPAND_INITIALIZER)
	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						 label_rtx (exp),
						 forced_labels);
	  }

	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  rtx value = DECL_RTL_IF_SET (exp);

	  layout_decl (exp, 0);

	  /* If the RTL was already set, update its mode and memory
	     attributes.  */
	  if (value != 0)
	    {
	      PUT_MODE (value, DECL_MODE (exp));
	      SET_DECL_RTL (exp, 0);
	      set_mem_attributes (value, exp, 1);
	      SET_DECL_RTL (exp, value);
	    }
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  (*lang_hooks.mark_addressable) (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = replace_equiv_address (DECL_RTL (exp),
				      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;
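      /* For illustration: the tree constant 5 of type `int' becomes
	 (const_int 5), while a constant wider than HOST_BITS_PER_WIDE_INT
	 bits becomes a CONST_DOUBLE carrying low and high halves, which
	 is why both TREE_INT_CST_LOW and TREE_INT_CST_HIGH are passed
	 above.  */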
    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return replace_equiv_address (TREE_CST_RTL (exp),
				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
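      /* For illustration (hypothetical RISC target): if "abc" was output
	 at symbol .LC0 but (mem (symbol_ref .LC0)) is not a legitimate
	 address there, the code above rewrites the MEM over a copy of its
	 address so later passes load .LC0's address into a register
	 explicitly.  */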
    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	const char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return
	    replace_equiv_address (temp,
				   fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */
      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0)
	= (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
      return temp;

    case PLACEHOLDER_EXPR:
      {
	tree old_list = placeholder_list;
	tree placeholder_expr = 0;

	exp = find_placeholder (exp, &placeholder_expr);
	if (exp != 0)
	  {
	    placeholder_list = TREE_CHAIN (placeholder_expr);
	    temp = expand_expr (exp, original_target, tmode, modifier);
	    placeholder_list = old_list;
	    return temp;
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
			    modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0,
			     int_size_in_bytes (TREE_TYPE (exp)));
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_WRITE)
	  return gen_int_mode (TREE_STRING_POINTER (string)
			       [TREE_INT_CST_LOW (index)], mode);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();
      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return gen_int_mode (TREE_STRING_POINTER (array)
			       [TREE_INT_CST_LOW (index)], mode);
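	/* For illustration: given the example above, "foo"[2] is folded at
	   expand time straight to gen_int_mode ('o', QImode), i.e.
	   (const_int 111), assuming 8-bit characters in QImode.  */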
	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;
	    unsigned HOST_WIDE_INT i;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
      /* Fall through.  */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */

	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

	    /* If this object is in a register, put it into memory.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		/* If the operand is a SAVE_EXPR, we can deal with this by
		   forcing the SAVE_EXPR into memory.  */
		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		  {
		    put_var_into_stack (TREE_OPERAND (exp, 0));
		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		  }
		else
		  {
		    tree nt
		      = build_qualified_type (TREE_TYPE (tem),
					      (TYPE_QUALS (TREE_TYPE (tem))
					       | TYPE_QUAL_CONST));
		    rtx memloc = assign_temp (nt, 1, 1, 1);

		    emit_move_insn (memloc, op0);
		    op0 = memloc;
		  }
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    /* A constant address in OP0 can have VOIDmode, we must not try
	       to call force_reg for that case.  Avoid that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one element arrays having the same mode as its element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
		&& ((TYPE_ALIGN (TREE_TYPE (tem))
		     < GET_MODE_ALIGNMENT (mode))
		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && GET_CODE (op0) == MEM
		      && GET_CODE (target) == MEM
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_temp (build_qualified_type
				       ((*lang_hooks.types.type_for_mode)
					(ext_mode, 0),
					TYPE_QUAL_CONST), 0, 1, 1);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case VTABLE_REF:
      {
	rtx insn, before = get_last_insn (), vtbl_ref;

	/* Evaluate the interior expression.  */
	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
				 tmode, modifier);

	/* Get or create an instruction off which to hang a note.  */
	if (REG_P (subtarget))
	  {
	    target = subtarget;
	    insn = get_last_insn ();
	    if (insn == before)
	      abort ();
	    if (! INSN_P (insn))
	      insn = prev_nonnote_insn (insn);
	  }
	else
	  {
	    target = gen_reg_rtx (GET_MODE (subtarget));
	    insn = emit_move_insn (target, subtarget);
	  }

	/* Collect the data for the note.  */
	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
	vtbl_ref = plus_constant (vtbl_ref,
				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
	/* Discard the initial CONST that was added.  */
	vtbl_ref = XEXP (vtbl_ref, 0);

	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

	return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */

	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	     && TREE_CODE (set_low_bound) == INTEGER_CST
	     && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
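      /* Worked example (illustrative, assuming BITS_PER_UNIT == 8): for
	 a set with low bound 3 and index 19, rlow = 3, diff = 16, quo = 2
	 and rem = 3, so membership is bit 3 of the set's byte 2.  */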
    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
				  CLEANUP_EH_ONLY (exp));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return (*lang_hooks.expand_expr)
	      (exp, original_target, tmode, modifier);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    target = assign_temp (type, 0, 1, 1);

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, type, 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TREE_UNSIGNED (inner_type));
	}

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are within a word, we
	 can use gen_lowpart.  If neither is true, make sure the operand is
	 in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (GET_CODE (op0) == MEM)
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      if (TREE_ADDRESSABLE (exp))
		abort ();

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    /* If we can't negate the constant in TYPE, leave it alone and
	       expand_binop will negate it for us.  We used to try to do it
	       here in the signed version of TYPE, but that doesn't work
	       on POINTER_TYPEs.  */;
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS(mode) == MODE_INT)
		   ? subv_optab : sub_optab;
      goto binop;
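      /* For illustration: `x - 5' is rebuilt above as `x + (-5)' and
	 re-dispatched through the PLUS_EXPR code, so constant offsets can
	 be folded with plus_constant; for unsigned or overflowing
	 constants the negation is left for expand_binop instead.  */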
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* If we knew for certain that this is arithmetic for an array
	     reference, and we knew the bounds of the array, then we could
	     apply the distributive law across (PLUS X C) for constant C.
	     Without such knowledge, we risk overflowing the computation
	     when both X and C are large, but X+C isn't.  */
	  /* ??? Could perhaps special-case EXP being unsigned and C being
	     positive.  In that case we are certain that X+C is no smaller
	     than X and so the transformed expression will overflow iff the
	     original would have.  */

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
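      /* For illustration (hypothetical target providing a mulhisi3
	 pattern): `(int) a * (int) b' with `short' a and b is recognized
	 above as a widening multiply, so a single HImode x HImode ->
	 SImode insn replaces two extends plus an SImode multiply.  */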
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the
	 original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, unsignedp);
      this_optab = sdiv_optab;
      goto binop;
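      /* For illustration: with -funsafe-math-optimizations, `a / b'
	 becomes `a * (1.0 / b)'; if `1.0 / b' is loop-invariant, CSE can
	 compute the reciprocal once and turn repeated divides into
	 multiplies.  */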
7951 case TRUNC_MOD_EXPR
:
7952 case FLOOR_MOD_EXPR
:
7954 case ROUND_MOD_EXPR
:
7955 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7957 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7958 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7959 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
	case FIX_ROUND_EXPR:
	case FIX_FLOOR_EXPR:
	case FIX_CEIL_EXPR:
	  abort ();			/* Not used for C.  */

	case FIX_TRUNC_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
	  if (target == 0)
	    target = gen_reg_rtx (mode);
	  expand_fix (target, op0, unsignedp);
	  return target;

	case FLOAT_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
	  if (target == 0)
	    target = gen_reg_rtx (mode);
	  /* expand_float can't figure out what to do if FROM has VOIDmode.
	     So give it the correct mode.  With -O, cse will optimize this.  */
	  if (GET_MODE (op0) == VOIDmode)
	    op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				    op0);
	  expand_float (target, op0,
			TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  return target;
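	  /* Illustrative example (added): in `double d = (double) 5;' the
	     constant reaches here as a VOIDmode CONST_INT, so it is first
	     copied into a register of the operand type's mode to give
	     expand_float a definite source mode.  */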
	case NEGATE_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  temp = expand_unop (mode,
			      ! unsignedp && flag_trapv
			      && (GET_MODE_CLASS (mode) == MODE_INT)
			      ? negv_optab : neg_optab, op0, target, 0);
	  if (temp == 0)
	    abort ();
	  return temp;

	case ABS_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

	  /* Handle complex values specially.  */
	  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	      || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	    return expand_complex_abs (mode, op0, target, unsignedp);

	  /* Unsigned abs is simply the operand.  Testing here means we don't
	     risk generating incorrect code below.  */
	  if (TREE_UNSIGNED (type))
	    return op0;

	  return expand_abs (mode, op0, target, unsignedp,
			     safe_from_p (target, TREE_OPERAND (exp, 0), 1));
	case MAX_EXPR:
	case MIN_EXPR:
	  target = original_target;
	  if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	      || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	      || GET_MODE (target) != mode
	      || (GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER))
	    target = gen_reg_rtx (mode);
	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

	  /* First try to do it with a special MIN or MAX instruction.
	     If that does not win, use a conditional jump to select the proper
	     value.  */
	  this_optab = (TREE_UNSIGNED (type)
			? (code == MIN_EXPR ? umin_optab : umax_optab)
			: (code == MIN_EXPR ? smin_optab : smax_optab));

	  temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			       OPTAB_WIDEN);
	  if (temp != 0)
	    return temp;

	  /* At this point, a MEM target is no longer useful; we will get better
	     code without it.  */

	  if (GET_CODE (target) == MEM)
	    target = gen_reg_rtx (mode);

	  if (target != op0)
	    emit_move_insn (target, op0);

	  op0 = gen_label_rtx ();

	  /* If this mode is an integer too wide to compare properly,
	     compare word by word.  Rely on cse to optimize constant cases.  */
	  if (GET_MODE_CLASS (mode) == MODE_INT
	      && ! can_compare_p (GE, mode, ccp_jump))
	    {
	      if (code == MAX_EXPR)
		do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					      target, op1, NULL_RTX, op0);
	      else
		do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					      op1, target, NULL_RTX, op0);
	    }
	  else
	    {
	      int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	      do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				       unsignedp, mode, NULL_RTX, NULL_RTX,
				       op0);
	    }
	  emit_move_insn (target, op1);
	  emit_label (op0);
	  return target;
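	  /* Illustrative example (added): for `m = a < b ? a : b' folded
	     to MIN_EXPR on a machine without a min instruction, the code
	     above acts roughly like:  m = a; if (!(m <= b)) m = b;
	     with the conditional move done via the jump around the
	     second emit_move_insn.  */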
	case BIT_NOT_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
	  if (temp == 0)
	    abort ();
	  return temp;

	case FFS_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  temp = expand_unop (mode, ffs_optab, op0, target, 1);
	  if (temp == 0)
	    abort ();
	  return temp;
	  /* ??? Can optimize bitwise operations with one arg constant.
	     Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	     and (a bitwise1 b) bitwise2 b (etc)
	     but that is probably not worth while.  */

	  /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	     boolean values when we want in all cases to compute both of them.  In
	     general it is fastest to do TRUTH_AND_EXPR by computing both operands
	     as actual zero-or-1 values and then bitwise anding.  In cases where
	     there cannot be any side effects, better code would be made by
	     treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	     how to recognize those cases.  */
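	  /* Illustrative example (added): for `f() && g()' as
	     TRUTH_ANDIF_EXPR, g() is never evaluated when f() yields 0;
	     as TRUTH_AND_EXPR (both arms always wanted) each operand is
	     computed to 0-or-1 and the results are bitwise anded.  */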
	case TRUTH_AND_EXPR:
	case BIT_AND_EXPR:
	  this_optab = and_optab;
	  goto binop;

	case TRUTH_OR_EXPR:
	case BIT_IOR_EXPR:
	  this_optab = ior_optab;
	  goto binop;

	case TRUTH_XOR_EXPR:
	case BIT_XOR_EXPR:
	  this_optab = xor_optab;
	  goto binop;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	case LROTATE_EXPR:
	case RROTATE_EXPR:
	  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	    subtarget = 0;
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			       unsignedp);

	  /* Could determine the answer when only additive constants differ.  Also,
	     the addition of one can be handled by changing the condition.  */
	case LT_EXPR:
	case LE_EXPR:
	case GT_EXPR:
	case GE_EXPR:
	case EQ_EXPR:
	case NE_EXPR:
	case UNORDERED_EXPR:
	case ORDERED_EXPR:
	case UNLT_EXPR:
	case UNLE_EXPR:
	case UNGT_EXPR:
	case UNGE_EXPR:
	case UNEQ_EXPR:
	  temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
	  if (temp != 0)
	    return temp;
	  /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
	  if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	      && original_target
	      && GET_CODE (original_target) == REG
	      && (GET_MODE (original_target)
		  == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    {
	      temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
				  VOIDmode, 0);

	      /* If temp is constant, we can just compute the result.  */
	      if (GET_CODE (temp) == CONST_INT)
		{
		  if (INTVAL (temp) != 0)
		    emit_move_insn (target, const1_rtx);
		  else
		    emit_move_insn (target, const0_rtx);

		  return target;
		}

	      if (temp != original_target)
		{
		  enum machine_mode mode1 = GET_MODE (temp);
		  if (mode1 == VOIDmode)
		    mode1 = tmode != VOIDmode ? tmode : mode;

		  temp = copy_to_mode_reg (mode1, temp);
		}

	      op1 = gen_label_rtx ();
	      emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				       GET_MODE (temp), unsignedp, op1);
	      emit_move_insn (temp, const1_rtx);
	      emit_label (op1);
	      return temp;
	    }
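	  /* Illustrative example (added): for `flag = (x != 0);' with X
	     already in the target register, the code above skips a
	     store-flag sequence entirely: load X, then jump over a move
	     of 1 exactly when X is zero (in which case the register
	     already holds 0).  */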
	  /* If no set-flag instruction, must generate a conditional
	     store into a temporary variable.  Drop through
	     and handle this like && and ||.  */
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  if (! ignore
	      && (target == 0 || ! safe_from_p (target, exp, 1)
		  /* Make sure we don't have a hard reg (such as function's return
		     value) live across basic blocks, if not optimizing.  */
		  || (!optimize && GET_CODE (target) == REG
		      && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	    target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	  if (target)
	    emit_clr_insn (target);

	  op1 = gen_label_rtx ();
	  jumpifnot (exp, op1);

	  if (target)
	    emit_0_to_1_insn (target);

	  emit_label (op1);
	  return ignore ? const0_rtx : target;
	case TRUTH_NOT_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
	  /* The parser is careful to generate TRUTH_NOT_EXPR
	     only with operands that are always zero or one.  */
	  temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			       target, 1, OPTAB_LIB_WIDEN);
	  if (temp == 0)
	    abort ();
	  return temp;

	case COMPOUND_EXPR:
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  return expand_expr (TREE_OPERAND (exp, 1),
			      (ignore ? const0_rtx : target),
			      VOIDmode, modifier);
	case COND_EXPR:
	  /* If we would have a "singleton" (see below) were it not for a
	     conversion in each arm, bring that conversion back out.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	      && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	    {
	      tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	      tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	      if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
		   && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
		  || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		      && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
		  || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		      && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
		  || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		      && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
		return expand_expr (build1 (NOP_EXPR, type,
					    build (COND_EXPR, TREE_TYPE (iftrue),
						   TREE_OPERAND (exp, 0),
						   iftrue, iffalse)),
				    target, tmode, modifier);
	    }
	  /* Note that COND_EXPRs whose type is a structure or union
	     are required to be constructed to contain assignments of
	     a temporary variable, so that we can evaluate them here
	     for side effect only.  If type is void, we must do likewise.  */

	  /* If an arm of the branch requires a cleanup,
	     only that cleanup is performed.  */

	  {
	    tree singleton = 0;
	    tree binary_op = 0, unary_op = 0;

	    /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	       convert it to our mode, if necessary.  */
	    if (integer_onep (TREE_OPERAND (exp, 1))
		&& integer_zerop (TREE_OPERAND (exp, 2))
		&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	      {
		if (ignore)
		  {
		    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
				 modifier);
		    return const0_rtx;
		  }

		op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
		if (GET_MODE (op0) == mode)
		  return op0;

		if (target == 0)
		  target = gen_reg_rtx (mode);
		convert_move (target, op0, unsignedp);
		return target;
	      }
	    /* Check for X ? A + B : A.  If we have this, we can copy A to the
	       output and conditionally add B.  Similarly for unary operations.
	       Don't do this if X has side-effects because those side effects
	       might affect A or B and the "?" operation is a sequence point in
	       ANSI.  (operand_equal_p tests for side effects.)  */

	    if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
		&& operand_equal_p (TREE_OPERAND (exp, 2),
				    TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	      singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		     && operand_equal_p (TREE_OPERAND (exp, 1),
					 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	      singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		     && operand_equal_p (TREE_OPERAND (exp, 2),
					 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	      singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		     && operand_equal_p (TREE_OPERAND (exp, 1),
					 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	      singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	    /* If we are not to produce a result, we have no target.  Otherwise,
	       if a target was specified use it; it will not be used as an
	       intermediate target unless it is safe.  If no target, use a
	       temporary.  */

	    if (ignore)
	      temp = 0;
	    else if (original_target
		     && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
			 || (singleton && GET_CODE (original_target) == REG
			     && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			     && original_target == var_rtx (singleton)))
		     && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		     && (! can_conditionally_move_p (mode)
			 || GET_CODE (original_target) == REG
			 || TREE_ADDRESSABLE (type))
#endif
		     && (GET_CODE (original_target) != MEM
			 || TREE_ADDRESSABLE (type)))
	      temp = original_target;
	    else if (TREE_ADDRESSABLE (type))
	      abort ();
	    else
	      temp = assign_temp (type, 0, 0, 1);
	    /* If we had X ? A + C : A, with C a constant power of 2, and we can
	       do the test of X as a store-flag operation, do this as
	       A + ((X != 0) << log C).  Similarly for other simple binary
	       operators.  Only do for C == 1 if BRANCH_COST is low.  */
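	    /* Illustrative example (added): `x ? a + 4 : a' can become
	       a + ((x != 0) << 2), trading the branch for a store-flag
	       and a shift when the target computes comparisons into a
	       register cheaply.  */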
	    if (temp && singleton && binary_op
		&& (TREE_CODE (binary_op) == PLUS_EXPR
		    || TREE_CODE (binary_op) == MINUS_EXPR
		    || TREE_CODE (binary_op) == BIT_IOR_EXPR
		    || TREE_CODE (binary_op) == BIT_XOR_EXPR)
		&& (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		    : integer_onep (TREE_OPERAND (binary_op, 1)))
		&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	      {
		rtx result;
		optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
				? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
				   ? addv_optab : add_optab)
				: TREE_CODE (binary_op) == MINUS_EXPR
				? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
				   ? subv_optab : sub_optab)
				: TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
				: xor_optab);

		/* If we had X ? A : A + 1, do this as A + (X == 0).

		   We have to invert the truth value here and then put it
		   back later if do_store_flag fails.  We cannot simply copy
		   TREE_OPERAND (exp, 0) to another variable and modify that
		   because invert_truthvalue can modify the tree pointed to
		   by its argument.  */
		if (singleton == TREE_OPERAND (exp, 1))
		  TREE_OPERAND (exp, 0)
		    = invert_truthvalue (TREE_OPERAND (exp, 0));

		result = do_store_flag (TREE_OPERAND (exp, 0),
					(safe_from_p (temp, singleton, 1)
					 ? temp : NULL_RTX),
					mode, BRANCH_COST <= 1);

		if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
		  result = expand_shift (LSHIFT_EXPR, mode, result,
					 build_int_2 (tree_log2
						      (TREE_OPERAND
						       (binary_op, 1)),
						      0),
					 (safe_from_p (temp, singleton, 1)
					  ? temp : NULL_RTX), 0);

		if (result)
		  {
		    op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		    return expand_binop (mode, boptab, op1, result, temp,
					 unsignedp, OPTAB_LIB_WIDEN);
		  }
		else if (singleton == TREE_OPERAND (exp, 1))
		  TREE_OPERAND (exp, 0)
		    = invert_truthvalue (TREE_OPERAND (exp, 0));
	      }
	    do_pending_stack_adjust ();
	    NO_DEFER_POP;
	    op0 = gen_label_rtx ();

	    if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	      {
		if (temp != 0)
		  {
		    /* If the target conflicts with the other operand of the
		       binary op, we can't use it.  Also, we can't use the target
		       if it is a hard register, because evaluating the condition
		       might clobber it.  */
		    if ((binary_op
			 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
			|| (GET_CODE (temp) == REG
			    && REGNO (temp) < FIRST_PSEUDO_REGISTER))
		      temp = gen_reg_rtx (mode);
		    store_expr (singleton, temp, 0);
		  }
		else
		  expand_expr (singleton,
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		if (singleton == TREE_OPERAND (exp, 1))
		  jumpif (TREE_OPERAND (exp, 0), op0);
		else
		  jumpifnot (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();
		if (binary_op && temp == 0)
		  /* Just touch the other operand.  */
		  expand_expr (TREE_OPERAND (binary_op, 1),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		else if (binary_op)
		  store_expr (build (TREE_CODE (binary_op), type,
				     make_tree (type, temp),
				     TREE_OPERAND (binary_op, 1)),
			      temp, 0);
		else
		  store_expr (build1 (TREE_CODE (unary_op), type,
				      make_tree (type, temp)),
			      temp, 0);
		op1 = op0;
	      }
	    /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	       comparison operator.  If we have one of these cases, set the
	       output to A, branch on A (cse will merge these two references),
	       then set the output to FOO.  */
	    else if (temp
		     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
					 TREE_OPERAND (exp, 1), 0)
		     && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
			 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		     && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	      {
		if (GET_CODE (temp) == REG
		    && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		  temp = gen_reg_rtx (mode);
		store_expr (TREE_OPERAND (exp, 1), temp, 0);
		jumpif (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();
		store_expr (TREE_OPERAND (exp, 2), temp, 0);
		op1 = op0;
	      }
	    else if (temp
		     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
					 TREE_OPERAND (exp, 2), 0)
		     && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
			 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		     && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	      {
		if (GET_CODE (temp) == REG
		    && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		  temp = gen_reg_rtx (mode);
		store_expr (TREE_OPERAND (exp, 2), temp, 0);
		jumpifnot (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();
		store_expr (TREE_OPERAND (exp, 1), temp, 0);
		op1 = op0;
	      }
	    else
	      {
		op1 = gen_label_rtx ();
		jumpifnot (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();

		/* One branch of the cond can be void, if it never returns.  For
		   example A ? throw : E  */
		if (temp != 0
		    && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
		  store_expr (TREE_OPERAND (exp, 1), temp, 0);
		else
		  expand_expr (TREE_OPERAND (exp, 1),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		end_cleanup_deferral ();
		emit_queue ();
		emit_jump_insn (gen_jump (op1));
		emit_barrier ();
		emit_label (op0);
		start_cleanup_deferral ();
		if (temp != 0
		    && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
		  store_expr (TREE_OPERAND (exp, 2), temp, 0);
		else
		  expand_expr (TREE_OPERAND (exp, 2),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	      }

	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_label (op1);
	    OK_DEFER_POP;

	    return temp;
	  }

	case TARGET_EXPR:
	  {
	    /* Something needs to be initialized, but we didn't know
	       where that thing was when building the tree.  For example,
	       it could be the return value of a function, or a parameter
	       to a function which lays down in the stack, or a temporary
	       variable which must be passed by reference.

	       We guarantee that the expression will either be constructed
	       or copied into our original target.  */

	    tree slot = TREE_OPERAND (exp, 0);
	    tree cleanups = NULL_TREE;
	    tree exp1;

	    if (TREE_CODE (slot) != VAR_DECL)
	      abort ();

	    if (! ignore)
	      target = original_target;

	    /* Set this here so that if we get a target that refers to a
	       register variable that's already been used, put_reg_into_stack
	       knows that it should fix up those uses.  */
	    TREE_USED (slot) = 1;

	    if (target == 0)
	      {
		if (DECL_RTL_SET_P (slot))
		  {
		    target = DECL_RTL (slot);
		    /* We have already expanded the slot, so don't do
		       it again.  */
		    if (TREE_OPERAND (exp, 1) == NULL_TREE)
		      return target;
		  }
		else
		  {
		    target = assign_temp (type, 2, 0, 1);
		    /* All temp slots at this level must not conflict.  */
		    preserve_temp_slots (target);
		    SET_DECL_RTL (slot, target);
		    if (TREE_ADDRESSABLE (slot))
		      put_var_into_stack (slot);

		    /* Since SLOT is not known to the called function
		       to belong to its stack frame, we must build an explicit
		       cleanup.  This case occurs when we must build up a reference
		       to pass the reference as an argument.  In this case,
		       it is very likely that such a reference need not be
		       built here.  */

		    if (TREE_OPERAND (exp, 2) == 0)
		      TREE_OPERAND (exp, 2)
			= (*lang_hooks.maybe_build_cleanup) (slot);
		    cleanups = TREE_OPERAND (exp, 2);
		  }
	      }
	    else
	      {
		/* This case does occur, when expanding a parameter which
		   needs to be constructed on the stack.  The target
		   is the actual stack address that we want to initialize.
		   The function we call will perform the cleanup in this case.  */

		/* If we have already assigned it space, use that space,
		   not target that we were passed in, as our target
		   parameter is only a hint.  */
		if (DECL_RTL_SET_P (slot))
		  {
		    target = DECL_RTL (slot);
		    /* We have already expanded the slot, so don't do
		       it again.  */
		    if (TREE_OPERAND (exp, 1) == NULL_TREE)
		      return target;
		  }
		else
		  {
		    SET_DECL_RTL (slot, target);
		    /* If we must have an addressable slot, then make sure that
		       the RTL that we just stored in slot is OK.  */
		    if (TREE_ADDRESSABLE (slot))
		      put_var_into_stack (slot);
		  }
	      }

	    exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	    /* Mark it as expanded.  */
	    TREE_OPERAND (exp, 1) = NULL_TREE;

	    store_expr (exp1, target, 0);

	    expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

	    return target;
	  }
	case INIT_EXPR:
	  {
	    tree lhs = TREE_OPERAND (exp, 0);
	    tree rhs = TREE_OPERAND (exp, 1);

	    temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	    return temp;
	  }

	case MODIFY_EXPR:
	  {
	    /* If lhs is complex, expand calls in rhs before computing it.
	       That's so we don't compute a pointer and save it over a
	       call.  If lhs is simple, compute it first so we can give it
	       as a target if the rhs is just a call.  This avoids an
	       extra temp and copy and that prevents a partial-subsumption
	       which makes bad code.  Actually we could treat
	       component_ref's of vars like vars.  */

	    tree lhs = TREE_OPERAND (exp, 0);
	    tree rhs = TREE_OPERAND (exp, 1);

	    /* Check for |= or &= of a bitfield of size one into another bitfield
	       of size 1.  In this case, (unless we need the result of the
	       assignment) we can do this more efficiently with a
	       test followed by an assignment, if necessary.

	       ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	       things change so we do, this code should be enhanced to
	       support it.  */
	    if (ignore
		&& TREE_CODE (lhs) == COMPONENT_REF
		&& (TREE_CODE (rhs) == BIT_IOR_EXPR
		    || TREE_CODE (rhs) == BIT_AND_EXPR)
		&& TREE_OPERAND (rhs, 0) == lhs
		&& TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
		&& integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
		&& integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	      {
		rtx label = gen_label_rtx ();

		do_jump (TREE_OPERAND (rhs, 1),
			 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
			 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
		expand_assignment (lhs, convert (TREE_TYPE (rhs),
						 (TREE_CODE (rhs) == BIT_IOR_EXPR
						  ? integer_one_node
						  : integer_zero_node)),
				   0, 0);
		do_pending_stack_adjust ();
		emit_label (label);
		return const0_rtx;
	      }

	    temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	    return temp;
	  }
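	  /* Illustrative example (added): for adjacent one-bit fields,
	     `s.a |= s.b;' whose result is unused becomes, per the code
	     above, a test of s.b and a conditional store of 1 into s.a,
	     instead of a read-modify-write of both bitfields.  */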
	case RETURN_EXPR:
	  if (!TREE_OPERAND (exp, 0))
	    expand_null_return ();
	  else
	    expand_return (TREE_OPERAND (exp, 0));
	  return const0_rtx;

	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  return expand_increment (exp, 0, ignore);

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* Faster to treat as pre-increment if result is not used.  */
	  return expand_increment (exp, ! ignore, ignore);
	case ADDR_EXPR:
	  /* Are we taking the address of a nested function?  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	      && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	      && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	      && ! TREE_STATIC (exp))
	    {
	      op0 = trampoline_address (TREE_OPERAND (exp, 0));
	      op0 = force_operand (op0, target);
	    }
	  /* If we are taking the address of something erroneous, just
	     return a zero.  */
	  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	    return const0_rtx;
	  /* If we are taking the address of a constant and are at the
	     top level, we have to use output_constant_def since we can't
	     call force_const_mem at top level.  */
	  else if (cfun == 0
		   && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
		       || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
			   == 'c')))
	    op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
	  else
	    {
	      /* We make sure to pass const0_rtx down if we came in with
		 ignore set, to avoid doing the cleanups twice for something.  */
	      op0 = expand_expr (TREE_OPERAND (exp, 0),
				 ignore ? const0_rtx : NULL_RTX, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? modifier : EXPAND_CONST_ADDRESS));

	      /* If we are going to ignore the result, OP0 will have been set
		 to const0_rtx, so just return it.  Don't get confused and
		 think we are taking the address of the constant.  */
	      if (ignore)
		return op0;

	      /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
		 clever and returns a REG when given a MEM.  */
	      op0 = protect_from_queue (op0, 1);

	      /* We would like the object in memory.  If it is a constant, we can
		 have it be statically allocated into memory.  For a non-constant,
		 we need to allocate some memory and store the value into it.  */

	      if (CONSTANT_P (op0))
		op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				       op0);
	      else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		       || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		       || GET_CODE (op0) == PARALLEL)
		{
		  /* If the operand is a SAVE_EXPR, we can deal with this by
		     forcing the SAVE_EXPR into memory.  */
		  if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		    {
		      put_var_into_stack (TREE_OPERAND (exp, 0));
		      op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		    }
		  else
		    {
		      /* If this object is in a register, it can't be BLKmode.  */
		      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		      rtx memloc = assign_temp (inner_type, 1, 1, 1);

		      if (GET_CODE (op0) == PARALLEL)
			/* Handle calls that pass values in multiple
			   non-contiguous locations.  The Irix 6 ABI has examples
			   of this.  */
			emit_group_store (memloc, op0,
					  int_size_in_bytes (inner_type));
		      else
			emit_move_insn (memloc, op0);

		      op0 = memloc;
		    }
		}

	      if (GET_CODE (op0) != MEM)
		abort ();

	      mark_temp_addr_taken (op0);
	      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
		{
		  op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
		      && mode == ptr_mode)
		    op0 = convert_memory_address (ptr_mode, op0);
#endif
		  return op0;
		}

	      /* If OP0 is not aligned as least as much as the type requires, we
		 need to make a temporary, copy OP0 to it, and take the address of
		 the temporary.  We want to use the alignment of the type, not of
		 the operand.  Note that this is incorrect for FUNCTION_TYPE, but
		 the test for BLKmode means that can't happen.  The test for
		 BLKmode is because we never make mis-aligned MEMs with
		 non-BLKmode.

		 We don't need to do this at all if the machine doesn't have
		 strict alignment.  */
	      if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
		  && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		      > MEM_ALIGN (op0))
		  && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  rtx new
		    = assign_stack_temp_for_type
		      (TYPE_MODE (inner_type),
		       MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		       : int_size_in_bytes (inner_type),
		       1, build_qualified_type (inner_type,
						(TYPE_QUALS (inner_type)
						 | TYPE_QUAL_CONST)));

		  if (TYPE_ALIGN_OK (inner_type))
		    abort ();

		  emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
		  op0 = new;
		}

	      op0 = force_operand (XEXP (op0, 0), target);
	    }

	  if (flag_force_addr
	      && GET_CODE (op0) != REG
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    op0 = force_reg (Pmode, op0);

	  if (GET_CODE (op0) == REG
	      && ! REG_USERVAR_P (op0))
	    mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	      && mode == ptr_mode)
	    op0 = convert_memory_address (ptr_mode, op0);
#endif

	  return op0;
	case ENTRY_VALUE_EXPR:
	  abort ();

	/* COMPLEX type for Extended Pascal & Fortran  */
	case COMPLEX_EXPR:
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    rtx insns;

	    /* Get the rtx code of the operands.  */
	    op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	    op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	    if (! target)
	      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	    start_sequence ();

	    /* Move the real (op0) and imaginary (op1) parts to their location.  */
	    emit_move_insn (gen_realpart (mode, target), op0);
	    emit_move_insn (gen_imagpart (mode, target), op1);

	    insns = get_insns ();
	    end_sequence ();

	    /* Complex construction should appear as a single unit.  */
	    /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	       each with a separate pseudo as destination.
	       It's not correct for flow to treat them as a unit.  */
	    if (GET_CODE (target) != CONCAT)
	      emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	    else
	      emit_insns (insns);

	    return target;
	  }

	case REALPART_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	  return gen_realpart (mode, op0);

	case IMAGPART_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	  return gen_imagpart (mode, op0);
	case CONJ_EXPR:
	  {
	    enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    rtx imag_t;
	    rtx insns;

	    op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	    if (! target)
	      target = gen_reg_rtx (mode);

	    start_sequence ();

	    /* Store the realpart and the negated imagpart to target.  */
	    emit_move_insn (gen_realpart (partmode, target),
			    gen_realpart (partmode, op0));

	    imag_t = gen_imagpart (partmode, target);
	    temp = expand_unop (partmode,
				! unsignedp && flag_trapv
				&& (GET_MODE_CLASS (partmode) == MODE_INT)
				? negv_optab : neg_optab,
				gen_imagpart (partmode, op0), imag_t, 0);
	    if (temp != imag_t)
	      emit_move_insn (imag_t, temp);

	    insns = get_insns ();
	    end_sequence ();

	    /* Conjugate should appear as a single unit
	       If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	       each with a separate pseudo as destination.
	       It's not correct for flow to treat them as a unit.  */
	    if (GET_CODE (target) != CONCAT)
	      emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	    else
	      emit_insns (insns);

	    return target;
	  }
	case TRY_CATCH_EXPR:
	  {
	    tree handler = TREE_OPERAND (exp, 1);

	    expand_eh_region_start ();

	    op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	    expand_eh_region_end_cleanup (handler);

	    return op0;
	  }

	case TRY_FINALLY_EXPR:
	  {
	    tree try_block = TREE_OPERAND (exp, 0);
	    tree finally_block = TREE_OPERAND (exp, 1);
	    rtx finally_label = gen_label_rtx ();
	    rtx done_label = gen_label_rtx ();
	    rtx return_link = gen_reg_rtx (Pmode);
	    tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
				  (tree) finally_label, (tree) return_link);
	    TREE_SIDE_EFFECTS (cleanup) = 1;

	    /* Start a new binding layer that will keep track of all cleanup
	       actions to be performed.  */
	    expand_start_bindings (2);

	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, cleanup);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	    emit_jump (done_label);
	    emit_label (finally_label);
	    expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	    emit_indirect_jump (return_link);
	    emit_label (done_label);
	    return op0;
	  }

	case GOTO_SUBROUTINE_EXPR:
	  {
	    rtx subr = (rtx) TREE_OPERAND (exp, 0);
	    rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	    rtx return_address = gen_label_rtx ();

	    emit_move_insn (return_link,
			    gen_rtx_LABEL_REF (Pmode, return_address));
	    emit_jump (subr);
	    emit_label (return_address);
	    return const0_rtx;
	  }

	case VA_ARG_EXPR:
	  return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

	case EXC_PTR_EXPR:
	  return get_exception_pointer (cfun);

	case FDESC_EXPR:
	  /* Function descriptors are not valid except for as
	     initialization constants, and should not be expanded.  */
	  abort ();

	default:
	  return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
	}
      /* Here to do an ordinary binary operator, generating an instruction
	 from the optab already placed in `this_optab'.  */
    binop:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
    binop2:
      temp = expand_binop (mode, this_optab, op0, op1, target,
			   unsignedp, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (offset, exp)
     tree offset;
     tree exp;
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR
	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
	  && (TREE_OPERAND (offset, 0) == exp
	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
		      == TREE_TYPE (exp)))));
}
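/* Illustrative note (added): the pattern recognized above corresponds to
   offsets of the form `(- &exp) & (alignment - 1)', the idiom produced
   when rounding the address of EXP up to a runtime alignment boundary.  */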
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
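/* Illustrative example (added): for `"hello" + i' -- a PLUS_EXPR with
   one arm the address of a STRING_CST -- string_constant returns the
   STRING_CST and sets *PTR_OFFSET to I, so callers can fold accesses
   into constant strings.  */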
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     tree exp;
     int post, ignore;
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }
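  /* Illustrative example (added): a promoted `short' counter kept in a
     word register takes the path above, so `++i' is rebuilt as the
     assignment `i = i + 1' and the store performs the required sign- or
     zero-extension.  */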
  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0				/* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
	{
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
	{
	  rtx addr = (general_operand (XEXP (op0, 0), mode)
		      ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;

	  op0 = replace_equiv_address (op0, addr);
	  temp = force_reg (GET_MODE (op0), op0);
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
	  enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
	goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
	 operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
				  TREE_OPERAND (exp, 0),
				  TREE_OPERAND (exp, 1)),
			   NE, NE, if_false_label, if_true_label);
      break;
    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;
    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;
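      /* Illustrative example (added): for `if (a || b)', the first
	 do_jump above jumps straight to if_true_label when A is nonzero
	 and falls through to evaluate B only when A is zero, so B is
	 never evaluated unnecessarily.  */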
    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
	HOST_WIDE_INT bitsize, bitpos;
	int unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &volatilep);

	type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }
    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();

	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

	  start_cleanup_deferral ();
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);

	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  end_cleanup_deferral ();
	}
      break;
    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		      fold (build (EQ_EXPR, TREE_TYPE (exp),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1)))),
		      fold (build (EQ_EXPR, TREE_TYPE (exp),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
	break;
      }

    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		      fold (build (NE_EXPR, TREE_TYPE (exp),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1)))),
		      fold (build (NE_EXPR, TREE_TYPE (exp),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
	break;
      }
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	enum rtx_code cmp, rcmp;
	int do_rev;

	if (code == UNORDERED_EXPR)
	  cmp = UNORDERED, rcmp = ORDERED;
	else
	  cmp = ORDERED, rcmp = UNORDERED;
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

	do_rev = 0;
	if (! can_compare_p (cmp, mode, ccp_jump)
	    && (can_compare_p (rcmp, mode, ccp_jump)
		/* If the target doesn't provide either UNORDERED or ORDERED
		   comparisons, canonicalize on UNORDERED for the library.  */
		|| rcmp == UNORDERED))
	  do_rev = 1;

	if (! do_rev)
	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;
      {
	enum rtx_code rcode1;
	enum tree_code tcode2;

	case UNLT_EXPR:
	  rcode1 = UNLT;
	  tcode2 = LT_EXPR;
	  goto unordered_bcc;
	case UNLE_EXPR:
	  rcode1 = UNLE;
	  tcode2 = LE_EXPR;
	  goto unordered_bcc;
	case UNGT_EXPR:
	  rcode1 = UNGT;
	  tcode2 = GT_EXPR;
	  goto unordered_bcc;
	case UNGE_EXPR:
	  rcode1 = UNGE;
	  tcode2 = GE_EXPR;
	  goto unordered_bcc;
	case UNEQ_EXPR:
	  rcode1 = UNEQ;
	  tcode2 = EQ_EXPR;
	  goto unordered_bcc;

	unordered_bcc:
	  mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  if (can_compare_p (rcode1, mode, ccp_jump))
	    do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
				 if_true_label);
	  else
	    {
	      tree op0 = save_expr (TREE_OPERAND (exp, 0));
	      tree op1 = save_expr (TREE_OPERAND (exp, 1));
	      tree cmp0, cmp1;

	      /* If the target doesn't support combined unordered
		 compares, decompose into UNORDERED + comparison.  */
	      cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
	      cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
	      exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	      do_jump (exp, if_false_label, if_true_label);
	    }
      }
      break;
      /* Special case:
	  __builtin_expect (<test>, 0)	and
	  __builtin_expect (<test>, 1)

	 We need to do this here, so that <test> is not converted to a SCC
	 operation on machines that use condition code registers and COMPARE
	 like the PowerPC, and then the jump is done based on whether the SCC
	 operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  tree arglist = TREE_OPERAND (exp, 1);

	  if (TREE_CODE (fndecl) == FUNCTION_DECL
	      && DECL_BUILT_IN (fndecl)
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && arglist != NULL_TREE
	      && TREE_CHAIN (arglist) != NULL_TREE)
	    {
	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						    if_true_label);

	      if (seq != NULL_RTX)
		{
		  emit_insn (seq);
		  return;
		}
	    }
	}
      /* fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
	  || GET_CODE (temp) == LABEL_REF)
	{
	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				 GET_MODE (temp), NULL_RTX,
				 if_false_label, if_true_label);
      else
	abort ();
      break;
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
			      if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
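#if 0
/* Editorial sketch, not part of the original sources: the loop above,
   modeled in C for a signed doubleword split into two 32-bit words on a
   little-endian host.  The high-order words are compared first and with
   the original signedness; lower words are examined only when the higher
   words are equal, and they always compare unsigned.  */

static int
example_gt_by_parts (unsigned int lo0, int hi0, unsigned int lo1, int hi1)
{
  if (hi0 > hi1)		/* high words: signed GT jumps true */
    return 1;
  if (hi0 != hi1)		/* high words differ: jump false */
    return 0;
  return lo0 > lo1;		/* low words: unsigned GT decides */
}
#endif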
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
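#if 0
/* Editorial sketch, not part of the original sources: the "or all the
   words" zero test emitted above, modeled in C for a doubleword made of
   two 32-bit words.  One IOR plus one compare replaces a chain of
   per-word comparisons.  */

static int
example_zero_by_parts (unsigned int lo, unsigned int hi)
{
  return (lo | hi) == 0;
}
#endif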
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
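#if 0
/* Editorial sketch, not part of the original sources: canonicalizing a
   constant into the second operand requires swapping the condition, not
   reversing it -- (5 < x) becomes (x > 5), never (x < 5).  */

static int
example_swap_condition (int x)
{
  return (5 < x) == (x > 5);	/* always 1 */
}
#endif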
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
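#if 0
/* Editorial sketch, not part of the original sources: why the
   label-swapping reversal above is guarded by ! FLOAT_MODE_P.  With IEEE
   NaNs, "not (a < b)" does not imply "a >= b", so reverse_condition
   would change the meaning for floating-point modes.  */

static int
example_nan_reversal (double a, double b)
{
  return !(a < b) == (a >= b);	/* 0, not 1, when a or b is a NaN */
}
#endif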
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}
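#if 0
/* Editorial sketch, not part of the original sources: the
   signedness-driven choice between SIGNED_CODE and UNSIGNED_CODE made
   above.  The same source operator maps to different rtx codes:  */

static int
example_signedness (void)
{
  int s = -1;				/* LT:  -1 < 0 is true */
  unsigned int u = (unsigned int) -1;	/* LTU: 0xffffffff < 0 is false */
  return (s < 0) && !(u < 0u);		/* yields 1 */
}
#endif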
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
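  /* Editorial illustration, not from the original sources: at the source
     level, the rewrite above is, for one word,

	 (x & (1 << n)) != 0   ==>   (x >> n) & 1
	 (x & (1 << n)) == 0   ==>   ((x >> n) & 1) ^ 1

     so the flag value is computed with a shift and a mask, with no
     store-flag (scc) instruction at all.  */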
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
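#if 0
/* Editorial sketch, not part of the original sources: the set/jump/set
   fallback emitted above (non-inverted case), modeled in C.  The target
   is preset to 1, a conditional branch skips the correction when the
   condition holds, and the fall-through path rewrites it to 0.  */

static int
example_set_jump_set (int op0, int op1)
{
  int target = 1;		/* emit_move_insn (target, const1_rtx) */
  if (op0 < op1)		/* bcc_gen_fctn jump to the label */
    goto done;
  target = 0;			/* emit_move_insn (target, const0_rtx) */
 done:				/* emit_label (label) */
  return target;
}
#endif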
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
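#if 0
/* Editorial sketch, not part of the original sources: the kind of dense
   switch for which the casesi/tablejump paths pay off once the number of
   cases reaches CASE_VALUES_THRESHOLD.  */

static int
example_dense_switch (int i)
{
  switch (i)
    {
    case 0: return 10;
    case 1: return 11;
    case 2: return 12;
    case 3: return 13;
    case 4: return 14;
    default: return -1;
    }
}
#endif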
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
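#if 0
/* Editorial sketch, not part of the original sources: the single
   unsigned bounds check used above.  After the table minimum has been
   subtracted, one GTU comparison rejects both ends of the range at
   once, because values below the minimum wrap around to large unsigned
   numbers.  */

static int
example_in_range (int index, int minval, unsigned int range)
{
  return (unsigned int) (index - minval) <= range;
}
#endif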
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
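#if 0
/* Editorial sketch, not part of the original sources (uses the GNU C
   computed-goto extension): the shape of the code do_tablejump emits --
   an unsigned bounds check, then an indexed load from a table of label
   addresses and an indirect jump through it.  */

static int
example_tablejump (unsigned int index)
{
  static void *table[] = { &&L0, &&L1, &&L2 };

  if (index > 2)		/* GTU check against the range */
    goto deflt;
  goto *table[index];		/* indirect jump through the table */

 L0: return 10;
 L1: return 11;
 L2: return 12;
 deflt: return -1;
}
#endif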