/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx,
					 rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
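
/* Illustrative example (not part of the original source): on a target
   with 4-byte words and no unaligned-access penalty,

       MOVE_BY_PIECES_P (16, 32)
	 == (move_by_pieces_ninsns (16, 32) < (unsigned int) MOVE_RATIO)
	 == (4 < 15)   when not optimizing for size,

   so emit_block_move below expands such a copy inline with
   move_by_pieces rather than via a movstr pattern or a libcall.  */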
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
/* This is run at the start of compiling a function.  */

  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;

     struct expr_status *p;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function ()

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (var, body)

  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
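
/* Illustrative usage sketch (not part of the original source): a caller
   that may be handed QUEUED operands protects each operand immediately
   before emitting the insn that uses it, and only flushes the queue
   afterwards:

       op0 = protect_from_queue (op0, 0);    <- operand that is only read
       dst = protect_from_queue (dst, 1);    <- operand being modified
       emit_move_insn (dst, op0);
       emit_queue ();

   Holding a protected value across an intervening emit_queue call is
   exactly the misuse warned about above.  */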
protect_from_queue (x, modify)

  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))

      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  enum rtx_code code = GET_CODE (x);

      return queued_subexp_p (XEXP (x, 0));

      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));

/* Perform all the pending incrementations.  */

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}

	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
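
/* Illustrative note (not part of the original source): widening the
   QImode value 0xff to SImode yields 0x000000ff when UNSIGNEDP is
   nonzero (zero-extension) and 0xffffffff when it is zero
   (sign-extension), assuming 8-bit QImode and 32-bit SImode.  */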
convert_move (to, from, unsignedp)

  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))

      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (to_real != from_real)

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	      emit_unop_insn (code, to, from, UNKNOWN);
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
706 libcall
= extendsfdf2_libfunc
;
710 libcall
= extendsfxf2_libfunc
;
714 libcall
= extendsftf2_libfunc
;
726 libcall
= truncdfsf2_libfunc
;
730 libcall
= extenddfxf2_libfunc
;
734 libcall
= extenddftf2_libfunc
;
746 libcall
= truncxfsf2_libfunc
;
750 libcall
= truncxfdf2_libfunc
;
762 libcall
= trunctfsf2_libfunc
;
766 libcall
= trunctfdf2_libfunc
;
778 if (libcall
== (rtx
) 0)
779 /* This conversion is not implemented yet. */
783 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
785 insns
= get_insns ();
787 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
792 /* Now both modes are integers. */
794 /* Handle expanding beyond a word. */
795 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
796 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
803 enum machine_mode lowpart_mode
;
804 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
806 /* Try converting directly if the insn is supported. */
807 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
810 /* If FROM is a SUBREG, put it into a register. Do this
811 so that we always generate the same set of insns for
812 better cse'ing; if an intermediate assignment occurred,
813 we won't be doing the operation directly on the SUBREG. */
814 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
815 from
= force_reg (from_mode
, from
);
816 emit_unop_insn (code
, to
, from
, equiv_code
);
819 /* Next, try converting via full word. */
820 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
821 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
822 != CODE_FOR_nothing
))
824 if (GET_CODE (to
) == REG
)
825 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
826 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
827 emit_unop_insn (code
, to
,
828 gen_lowpart (word_mode
, to
), equiv_code
);
832 /* No special multiword conversion insn; do it by hand. */
835 /* Since we will turn this into a no conflict block, we must ensure
836 that the source does not overlap the target. */
838 if (reg_overlap_mentioned_p (to
, from
))
839 from
= force_reg (from_mode
, from
);
841 /* Get a copy of FROM widened to a word, if necessary. */
842 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
843 lowpart_mode
= word_mode
;
845 lowpart_mode
= from_mode
;
847 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
849 lowpart
= gen_lowpart (lowpart_mode
, to
);
850 emit_move_insn (lowpart
, lowfrom
);
852 /* Compute the value to put in each remaining word. */
854 fill_value
= const0_rtx
;
859 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
860 && STORE_FLAG_VALUE
== -1)
862 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
864 fill_value
= gen_reg_rtx (word_mode
);
865 emit_insn (gen_slt (fill_value
));
871 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
872 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
874 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
878 /* Fill the remaining words. */
879 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
881 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
882 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
887 if (fill_value
!= subword
)
888 emit_move_insn (subword
, fill_value
);
891 insns
= get_insns ();
894 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
895 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
899 /* Truncating multi-word to a word or less. */
900 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
901 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
903 if (!((GET_CODE (from
) == MEM
904 && ! MEM_VOLATILE_P (from
)
905 && direct_load
[(int) to_mode
]
906 && ! mode_dependent_address_p (XEXP (from
, 0)))
907 || GET_CODE (from
) == REG
908 || GET_CODE (from
) == SUBREG
))
909 from
= force_reg (from_mode
, from
);
910 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
914 /* Handle pointer conversion. */ /* SPEE 900220. */
915 if (to_mode
== PQImode
)
917 if (from_mode
!= QImode
)
918 from
= convert_to_mode (QImode
, from
, unsignedp
);
920 #ifdef HAVE_truncqipqi2
921 if (HAVE_truncqipqi2
)
923 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
926 #endif /* HAVE_truncqipqi2 */
930 if (from_mode
== PQImode
)
932 if (to_mode
!= QImode
)
934 from
= convert_to_mode (QImode
, from
, unsignedp
);
939 #ifdef HAVE_extendpqiqi2
940 if (HAVE_extendpqiqi2
)
942 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
945 #endif /* HAVE_extendpqiqi2 */
950 if (to_mode
== PSImode
)
952 if (from_mode
!= SImode
)
953 from
= convert_to_mode (SImode
, from
, unsignedp
);
955 #ifdef HAVE_truncsipsi2
956 if (HAVE_truncsipsi2
)
958 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
961 #endif /* HAVE_truncsipsi2 */
965 if (from_mode
== PSImode
)
967 if (to_mode
!= SImode
)
969 from
= convert_to_mode (SImode
, from
, unsignedp
);
974 #ifdef HAVE_extendpsisi2
975 if (! unsignedp
&& HAVE_extendpsisi2
)
977 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
980 #endif /* HAVE_extendpsisi2 */
981 #ifdef HAVE_zero_extendpsisi2
982 if (unsignedp
&& HAVE_zero_extendpsisi2
)
984 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
987 #endif /* HAVE_zero_extendpsisi2 */
992 if (to_mode
== PDImode
)
994 if (from_mode
!= DImode
)
995 from
= convert_to_mode (DImode
, from
, unsignedp
);
997 #ifdef HAVE_truncdipdi2
998 if (HAVE_truncdipdi2
)
1000 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1003 #endif /* HAVE_truncdipdi2 */
1007 if (from_mode
== PDImode
)
1009 if (to_mode
!= DImode
)
1011 from
= convert_to_mode (DImode
, from
, unsignedp
);
1016 #ifdef HAVE_extendpdidi2
1017 if (HAVE_extendpdidi2
)
1019 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1022 #endif /* HAVE_extendpdidi2 */
1027 /* Now follow all the conversions between integers
1028 no more than a word long. */
1030 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1031 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1032 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1033 GET_MODE_BITSIZE (from_mode
)))
1035 if (!((GET_CODE (from
) == MEM
1036 && ! MEM_VOLATILE_P (from
)
1037 && direct_load
[(int) to_mode
]
1038 && ! mode_dependent_address_p (XEXP (from
, 0)))
1039 || GET_CODE (from
) == REG
1040 || GET_CODE (from
) == SUBREG
))
1041 from
= force_reg (from_mode
, from
);
1042 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1043 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1044 from
= copy_to_reg (from
);
1045 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1049 /* Handle extension. */
1050 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1052 /* Convert directly if that works. */
1053 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1054 != CODE_FOR_nothing
)
1057 from
= force_not_mem (from
);
1059 emit_unop_insn (code
, to
, from
, equiv_code
);
1064 enum machine_mode intermediate
;
1068 /* Search for a mode to convert via. */
1069 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1070 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1071 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1072 != CODE_FOR_nothing
)
1073 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1074 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1075 GET_MODE_BITSIZE (intermediate
))))
1076 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1077 != CODE_FOR_nothing
))
1079 convert_move (to
, convert_to_mode (intermediate
, from
,
1080 unsignedp
), unsignedp
);
1084 /* No suitable intermediate mode.
1085 Generate what we need with shifts. */
1086 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1087 - GET_MODE_BITSIZE (from_mode
), 0);
1088 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1089 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1091 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1094 emit_move_insn (to
, tmp
);
1099 /* Support special truncate insns for certain modes. */
1101 if (from_mode
== DImode
&& to_mode
== SImode
)
1103 #ifdef HAVE_truncdisi2
1104 if (HAVE_truncdisi2
)
1106 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1110 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1114 if (from_mode
== DImode
&& to_mode
== HImode
)
1116 #ifdef HAVE_truncdihi2
1117 if (HAVE_truncdihi2
)
1119 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1123 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1127 if (from_mode
== DImode
&& to_mode
== QImode
)
1129 #ifdef HAVE_truncdiqi2
1130 if (HAVE_truncdiqi2
)
1132 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1136 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1140 if (from_mode
== SImode
&& to_mode
== HImode
)
1142 #ifdef HAVE_truncsihi2
1143 if (HAVE_truncsihi2
)
1145 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1149 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1153 if (from_mode
== SImode
&& to_mode
== QImode
)
1155 #ifdef HAVE_truncsiqi2
1156 if (HAVE_truncsiqi2
)
1158 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1162 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1166 if (from_mode
== HImode
&& to_mode
== QImode
)
1168 #ifdef HAVE_trunchiqi2
1169 if (HAVE_trunchiqi2
)
1171 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1175 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1179 if (from_mode
== TImode
&& to_mode
== DImode
)
1181 #ifdef HAVE_trunctidi2
1182 if (HAVE_trunctidi2
)
1184 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1188 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1192 if (from_mode
== TImode
&& to_mode
== SImode
)
1194 #ifdef HAVE_trunctisi2
1195 if (HAVE_trunctisi2
)
1197 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1201 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1205 if (from_mode
== TImode
&& to_mode
== HImode
)
1207 #ifdef HAVE_trunctihi2
1208 if (HAVE_trunctihi2
)
1210 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1214 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1218 if (from_mode
== TImode
&& to_mode
== QImode
)
1220 #ifdef HAVE_trunctiqi2
1221 if (HAVE_trunctiqi2
)
1223 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1227 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1231 /* Handle truncation of volatile memrefs, and so on;
1232 the things that couldn't be truncated directly,
1233 and for which there was no special instruction. */
1234 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1236 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1237 emit_move_insn (to
, temp
);
1241 /* Mode combination is not recognized. */
1245 /* Return an rtx for a value that would result
1246 from converting X to mode MODE.
1247 Both X and MODE may be floating, or both integer.
1248 UNSIGNEDP is nonzero if X is an unsigned value.
1249 This can be done by referring to a part of X in place
1250 or by copying to a new temporary with conversion.
1252 This function *must not* call protect_from_queue
1253 except when putting X into an insn (in which case convert_move does it). */
1256 convert_to_mode (mode
, x
, unsignedp
)
1257 enum machine_mode mode
;
1261 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1264 /* Return an rtx for a value that would result
1265 from converting X from mode OLDMODE to mode MODE.
1266 Both modes may be floating, or both integer.
1267 UNSIGNEDP is nonzero if X is an unsigned value.
1269 This can be done by referring to a part of X in place
1270 or by copying to a new temporary with conversion.
1272 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1274 This function *must not* call protect_from_queue
1275 except when putting X into an insn (in which case convert_move does it). */
1278 convert_modes (mode
, oldmode
, x
, unsignedp
)
1279 enum machine_mode mode
, oldmode
;
1285 /* If FROM is a SUBREG that indicates that we have already done at least
1286 the required extension, strip it. */
1288 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1289 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1290 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1291 x
= gen_lowpart (mode
, x
);
1293 if (GET_MODE (x
) != VOIDmode
)
1294 oldmode
= GET_MODE (x
);
1296 if (mode
== oldmode
)
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */
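
  /* Illustrative example (not part of the original source): with 32-bit
     HOST_WIDE_INT, converting (const_int -1) taken as an unsigned SImode
     value into a 64-bit mode must produce 0x00000000ffffffff (high-order
     word zero), whereas gen_lowpart would sign-extend and yield
     0xffffffffffffffff.  */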
1305 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1306 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1307 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1309 HOST_WIDE_INT val
= INTVAL (x
);
1311 if (oldmode
!= VOIDmode
1312 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1314 int width
= GET_MODE_BITSIZE (oldmode
);
1316 /* We need to zero extend VAL. */
1317 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1320 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1323 /* We can do this with a gen_lowpart if both desired and current modes
1324 are integer, and this is either a constant integer, a register, or a
1325 non-volatile MEM. Except for the constant case where MODE is no
1326 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1328 if ((GET_CODE (x
) == CONST_INT
1329 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1330 || (GET_MODE_CLASS (mode
) == MODE_INT
1331 && GET_MODE_CLASS (oldmode
) == MODE_INT
1332 && (GET_CODE (x
) == CONST_DOUBLE
1333 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1334 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1335 && direct_load
[(int) mode
])
1336 || (GET_CODE (x
) == REG
1337 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1338 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1340 /* ?? If we don't know OLDMODE, we have to assume here that
1341 X does not need sign- or zero-extension. This may not be
1342 the case, but it's the best we can do. */
1343 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1344 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1346 HOST_WIDE_INT val
= INTVAL (x
);
1347 int width
= GET_MODE_BITSIZE (oldmode
);
1349 /* We must sign or zero-extend in this case. Start by
1350 zero-extending, then sign extend if we need to. */
1351 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1353 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1354 val
|= (HOST_WIDE_INT
) (-1) << width
;
1356 return gen_int_mode (val
, mode
);
1359 return gen_lowpart (mode
, x
);
1362 temp
= gen_reg_rtx (mode
);
1363 convert_move (temp
, x
, unsignedp
);
1367 /* This macro is used to determine what the largest unit size that
1368 move_by_pieces can use is. */
1370 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1371 move efficiently, as opposed to MOVE_MAX which is the maximum
1372 number of bytes we can move with a single instruction. */
1374 #ifndef MOVE_MAX_PIECES
1375 #define MOVE_MAX_PIECES MOVE_MAX
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
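
/* Illustrative note (not part of the original source): emit_block_move
   below reaches this routine as

       move_by_pieces (x, y, INTVAL (size), align);

   whenever SIZE is a CONST_INT and MOVE_BY_PIECES_P accepts it.  */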
1388 move_by_pieces (to
, from
, len
, align
)
1390 unsigned HOST_WIDE_INT len
;
1393 struct move_by_pieces data
;
1394 rtx to_addr
, from_addr
= XEXP (from
, 0);
1395 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1396 enum machine_mode mode
= VOIDmode
, tmode
;
1397 enum insn_code icode
;
1400 data
.from_addr
= from_addr
;
1403 to_addr
= XEXP (to
, 0);
1406 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1407 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1409 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1416 #ifdef STACK_GROWS_DOWNWARD
1422 data
.to_addr
= to_addr
;
1425 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1426 || GET_CODE (from_addr
) == POST_INC
1427 || GET_CODE (from_addr
) == POST_DEC
);
1429 data
.explicit_inc_from
= 0;
1430 data
.explicit_inc_to
= 0;
1431 if (data
.reverse
) data
.offset
= len
;
1434 /* If copying requires more than two move insns,
1435 copy addresses to registers (to make displacements shorter)
1436 and use post-increment if available. */
1437 if (!(data
.autinc_from
&& data
.autinc_to
)
1438 && move_by_pieces_ninsns (len
, align
) > 2)
1440 /* Find the mode of the largest move... */
1441 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1442 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1443 if (GET_MODE_SIZE (tmode
) < max_size
)
1446 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1448 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1449 data
.autinc_from
= 1;
1450 data
.explicit_inc_from
= -1;
1452 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1454 data
.from_addr
= copy_addr_to_reg (from_addr
);
1455 data
.autinc_from
= 1;
1456 data
.explicit_inc_from
= 1;
1458 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1459 data
.from_addr
= copy_addr_to_reg (from_addr
);
1460 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1462 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1464 data
.explicit_inc_to
= -1;
1466 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1468 data
.to_addr
= copy_addr_to_reg (to_addr
);
1470 data
.explicit_inc_to
= 1;
1472 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1473 data
.to_addr
= copy_addr_to_reg (to_addr
);
1476 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1477 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1478 align
= MOVE_MAX
* BITS_PER_UNIT
;
1480 /* First move what we can in the largest integer mode, then go to
1481 successively smaller modes. */
1483 while (max_size
> 1)
1485 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1486 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1487 if (GET_MODE_SIZE (tmode
) < max_size
)
1490 if (mode
== VOIDmode
)
1493 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1494 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1495 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1497 max_size
= GET_MODE_SIZE (mode
);
1500 /* The code above should have handled everything. */
1505 /* Return number of insns required to move L bytes by pieces.
1506 ALIGN (in bits) is maximum alignment we can assume. */
1508 static unsigned HOST_WIDE_INT
1509 move_by_pieces_ninsns (l
, align
)
1510 unsigned HOST_WIDE_INT l
;
1513 unsigned HOST_WIDE_INT n_insns
= 0;
1514 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1516 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1517 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1518 align
= MOVE_MAX
* BITS_PER_UNIT
;
1520 while (max_size
> 1)
1522 enum machine_mode mode
= VOIDmode
, tmode
;
1523 enum insn_code icode
;
1525 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1526 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1527 if (GET_MODE_SIZE (tmode
) < max_size
)
1530 if (mode
== VOIDmode
)
1533 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1534 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1535 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1537 max_size
= GET_MODE_SIZE (mode
);
1545 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1546 with move instructions for mode MODE. GENFUN is the gen_... function
1547 to make a move insn for that mode. DATA has all the other info. */
1550 move_by_pieces_1 (genfun
, mode
, data
)
1551 rtx (*genfun
) PARAMS ((rtx
, ...));
1552 enum machine_mode mode
;
1553 struct move_by_pieces
*data
;
1555 unsigned int size
= GET_MODE_SIZE (mode
);
1556 rtx to1
= NULL_RTX
, from1
;
1558 while (data
->len
>= size
)
1561 data
->offset
-= size
;
1565 if (data
->autinc_to
)
1566 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1569 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1572 if (data
->autinc_from
)
1573 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1576 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1578 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1579 emit_insn (gen_add2_insn (data
->to_addr
,
1580 GEN_INT (-(HOST_WIDE_INT
)size
)));
1581 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1582 emit_insn (gen_add2_insn (data
->from_addr
,
1583 GEN_INT (-(HOST_WIDE_INT
)size
)));
1586 emit_insn ((*genfun
) (to1
, from1
));
1589 #ifdef PUSH_ROUNDING
1590 emit_single_push_insn (mode
, from1
, NULL
);
1596 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1597 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1598 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1599 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1601 if (! data
->reverse
)
1602 data
->offset
+= size
;
1608 /* Emit code to move a block Y to a block X.
1609 This may be done with string-move instructions,
1610 with multiple scalar move instructions, or with a library call.
1612 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1614 SIZE is an rtx that says how long they are.
1615 ALIGN is the maximum alignment we can assume they have.
1617 Return the address of the new block, if memcpy is called and returns it,
1621 emit_block_move (x
, y
, size
)
1626 #ifdef TARGET_MEM_FUNCTIONS
1628 tree call_expr
, arg_list
;
1630 unsigned int align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1632 if (GET_MODE (x
) != BLKmode
)
1635 if (GET_MODE (y
) != BLKmode
)
1638 x
= protect_from_queue (x
, 1);
1639 y
= protect_from_queue (y
, 0);
1640 size
= protect_from_queue (size
, 0);
1642 if (GET_CODE (x
) != MEM
)
1644 if (GET_CODE (y
) != MEM
)
1649 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1650 move_by_pieces (x
, y
, INTVAL (size
), align
);
1653 /* Try the most limited insn first, because there's no point
1654 including more than one in the machine description unless
1655 the more limited one has some advantage. */
1657 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1658 enum machine_mode mode
;
1660 /* Since this is a move insn, we don't care about volatility. */
1663 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1664 mode
= GET_MODE_WIDER_MODE (mode
))
1666 enum insn_code code
= movstr_optab
[(int) mode
];
1667 insn_operand_predicate_fn pred
;
1669 if (code
!= CODE_FOR_nothing
1670 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1671 here because if SIZE is less than the mode mask, as it is
1672 returned by the macro, it will definitely be less than the
1673 actual mode mask. */
1674 && ((GET_CODE (size
) == CONST_INT
1675 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1676 <= (GET_MODE_MASK (mode
) >> 1)))
1677 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1678 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1679 || (*pred
) (x
, BLKmode
))
1680 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1681 || (*pred
) (y
, BLKmode
))
1682 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1683 || (*pred
) (opalign
, VOIDmode
)))
1686 rtx last
= get_last_insn ();
1689 op2
= convert_to_mode (mode
, size
, 1);
1690 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1691 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1692 op2
= copy_to_mode_reg (mode
, op2
);
1694 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1702 delete_insns_since (last
);
1708 /* X, Y, or SIZE may have been passed through protect_from_queue.
1710 It is unsafe to save the value generated by protect_from_queue
1711 and reuse it later. Consider what happens if emit_queue is
1712 called before the return value from protect_from_queue is used.
1714 Expansion of the CALL_EXPR below will call emit_queue before
1715 we are finished emitting RTL for argument setup. So if we are
1716 not careful we could get the wrong value for an argument.
1718 To avoid this problem we go ahead and emit code to copy X, Y &
1719 SIZE into new pseudos. We can then place those new pseudos
1720 into an RTL_EXPR and use them later, even after a call to
1723 Note this is not strictly needed for library calls since they
1724 do not call emit_queue before loading their arguments. However,
1725 we may need to have library calls call emit_queue in the future
1726 since failing to do so could cause problems for targets which
1727 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1728 x
= copy_to_mode_reg (Pmode
, XEXP (x
, 0));
1729 y
= copy_to_mode_reg (Pmode
, XEXP (y
, 0));
1731 #ifdef TARGET_MEM_FUNCTIONS
1732 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
1734 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1735 TREE_UNSIGNED (integer_type_node
));
1736 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
1739 #ifdef TARGET_MEM_FUNCTIONS
1740 /* It is incorrect to use the libcall calling conventions to call
1741 memcpy in this context.
1743 This could be a user call to memcpy and the user may wish to
1744 examine the return value from memcpy.
1746 For targets where libcalls and normal calls have different conventions
1747 for returning pointers, we could end up generating incorrect code.
1749 So instead of using a libcall sequence we build up a suitable
1750 CALL_EXPR and expand the call in the normal fashion. */
1751 if (fn
== NULL_TREE
)
1755 /* This was copied from except.c, I don't know if all this is
1756 necessary in this context or not. */
1757 fn
= get_identifier ("memcpy");
1758 fntype
= build_pointer_type (void_type_node
);
1759 fntype
= build_function_type (fntype
, NULL_TREE
);
1760 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
1761 ggc_add_tree_root (&fn
, 1);
1762 DECL_EXTERNAL (fn
) = 1;
1763 TREE_PUBLIC (fn
) = 1;
1764 DECL_ARTIFICIAL (fn
) = 1;
1765 TREE_NOTHROW (fn
) = 1;
1766 make_decl_rtl (fn
, NULL
);
1767 assemble_external (fn
);
1770 /* We need to make an argument list for the function call.
1772 memcpy has three arguments, the first two are void * addresses and
1773 the last is a size_t byte count for the copy. */
1775 = build_tree_list (NULL_TREE
,
1776 make_tree (build_pointer_type (void_type_node
), x
));
1777 TREE_CHAIN (arg_list
)
1778 = build_tree_list (NULL_TREE
,
1779 make_tree (build_pointer_type (void_type_node
), y
));
1780 TREE_CHAIN (TREE_CHAIN (arg_list
))
1781 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
1782 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
1784 /* Now we have to build up the CALL_EXPR itself. */
1785 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1786 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1787 call_expr
, arg_list
, NULL_TREE
);
1788 TREE_SIDE_EFFECTS (call_expr
) = 1;
1790 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1792 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
1793 VOIDmode
, 3, y
, Pmode
, x
, Pmode
,
1794 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1795 TREE_UNSIGNED (integer_type_node
)),
1796 TYPE_MODE (integer_type_node
));
1799 /* If we are initializing a readonly value, show the above call
1800 clobbered it. Otherwise, a load from it may erroneously be hoisted
1802 if (RTX_UNCHANGING_P (x
))
1803 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
1809 /* Copy all or part of a value X into registers starting at REGNO.
1810 The number of registers to be filled is NREGS. */
1813 move_block_to_reg (regno
, x
, nregs
, mode
)
1817 enum machine_mode mode
;
1820 #ifdef HAVE_load_multiple
1828 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1829 x
= validize_mem (force_const_mem (mode
, x
));
1831 /* See if the machine can do this with a load multiple insn. */
1832 #ifdef HAVE_load_multiple
1833 if (HAVE_load_multiple
)
1835 last
= get_last_insn ();
1836 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1844 delete_insns_since (last
);
1848 for (i
= 0; i
< nregs
; i
++)
1849 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1850 operand_subword_force (x
, i
, mode
));
1853 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1854 The number of registers to be filled is NREGS. SIZE indicates the number
1855 of bytes in the object X. */
1858 move_block_from_reg (regno
, x
, nregs
, size
)
1865 #ifdef HAVE_store_multiple
1869 enum machine_mode mode
;
1874 /* If SIZE is that of a mode no bigger than a word, just use that
1875 mode's store operation. */
1876 if (size
<= UNITS_PER_WORD
1877 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
1878 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
)
1880 emit_move_insn (adjust_address (x
, mode
, 0), gen_rtx_REG (mode
, regno
));
1884 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1885 to the left before storing to memory. Note that the previous test
1886 doesn't handle all cases (e.g. SIZE == 3). */
1887 if (size
< UNITS_PER_WORD
1889 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
)
1891 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1897 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1898 gen_rtx_REG (word_mode
, regno
),
1899 build_int_2 ((UNITS_PER_WORD
- size
)
1900 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1901 emit_move_insn (tem
, shift
);
1905 /* See if the machine can do this with a store multiple insn. */
1906 #ifdef HAVE_store_multiple
1907 if (HAVE_store_multiple
)
1909 last
= get_last_insn ();
1910 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1918 delete_insns_since (last
);
1922 for (i
= 0; i
< nregs
; i
++)
1924 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1929 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1933 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1934 registers represented by a PARALLEL. SSIZE represents the total size of
1935 block SRC in bytes, or -1 if not known. */
1936 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1937 the balance will be in what would be the low-order memory addresses, i.e.
1938 left justified for big endian, right justified for little endian. This
1939 happens to be true for the targets currently using this support. If this
1940 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1944 emit_group_load (dst
, orig_src
, ssize
)
1951 if (GET_CODE (dst
) != PARALLEL
)
1954 /* Check for a NULL entry, used to indicate that the parameter goes
1955 both on the stack and in registers. */
1956 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1961 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1963 /* Process the pieces. */
1964 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1966 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1967 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1968 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1971 /* Handle trailing fragments that run over the size of the struct. */
1972 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1974 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1975 bytelen
= ssize
- bytepos
;
1980 /* If we won't be loading directly from memory, protect the real source
1981 from strange tricks we might play; but make sure that the source can
1982 be loaded directly into the destination. */
1984 if (GET_CODE (orig_src
) != MEM
1985 && (!CONSTANT_P (orig_src
)
1986 || (GET_MODE (orig_src
) != mode
1987 && GET_MODE (orig_src
) != VOIDmode
)))
1989 if (GET_MODE (orig_src
) == VOIDmode
)
1990 src
= gen_reg_rtx (mode
);
1992 src
= gen_reg_rtx (GET_MODE (orig_src
));
1994 emit_move_insn (src
, orig_src
);
1997 /* Optimize the access just a bit. */
1998 if (GET_CODE (src
) == MEM
1999 && MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
)
2000 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2001 && bytelen
== GET_MODE_SIZE (mode
))
2003 tmps
[i
] = gen_reg_rtx (mode
);
2004 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2006 else if (GET_CODE (src
) == CONCAT
)
2009 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0))))
2010 || (bytepos
== (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
2011 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 1)))))
2013 tmps
[i
] = XEXP (src
, bytepos
!= 0);
2014 if (! CONSTANT_P (tmps
[i
])
2015 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
2016 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
2017 0, 1, NULL_RTX
, mode
, mode
, ssize
);
2019 else if (bytepos
== 0)
2021 rtx mem
= assign_stack_temp (GET_MODE (src
),
2022 GET_MODE_SIZE (GET_MODE (src
)), 0);
2023 emit_move_insn (mem
, src
);
2024 tmps
[i
] = adjust_address (mem
, mode
, 0);
2029 else if (CONSTANT_P (src
)
2030 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
2033 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2034 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2037 if (BYTES_BIG_ENDIAN
&& shift
)
2038 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2039 tmps
[i
], 0, OPTAB_WIDEN
);
2044 /* Copy the extracted pieces into the proper (probable) hard regs. */
2045 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2046 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
2049 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2050 registers represented by a PARALLEL. SSIZE represents the total size of
2051 block DST, or -1 if not known. */
2054 emit_group_store (orig_dst
, src
, ssize
)
2061 if (GET_CODE (src
) != PARALLEL
)
2064 /* Check for a NULL entry, used to indicate that the parameter goes
2065 both on the stack and in registers. */
2066 if (XEXP (XVECEXP (src
, 0, 0), 0))
2071 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2073 /* Copy the (probable) hard regs into pseudos. */
2074 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2076 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2077 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2078 emit_move_insn (tmps
[i
], reg
);
2082 /* If we won't be storing directly into memory, protect the real destination
2083 from strange tricks we might play. */
2085 if (GET_CODE (dst
) == PARALLEL
)
2089 /* We can get a PARALLEL dst if there is a conditional expression in
2090 a return statement. In that case, the dst and src are the same,
2091 so no action is necessary. */
2092 if (rtx_equal_p (dst
, src
))
2095 /* It is unclear if we can ever reach here, but we may as well handle
2096 it. Allocate a temporary, and split this into a store/load to/from
2099 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2100 emit_group_store (temp
, src
, ssize
);
2101 emit_group_load (dst
, temp
, ssize
);
2104 else if (GET_CODE (dst
) != MEM
&& GET_CODE (dst
) != CONCAT
)
2106 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2107 /* Make life a bit easier for combine. */
2108 emit_move_insn (dst
, const0_rtx
);
2111 /* Process the pieces. */
2112 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2114 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2115 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2116 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2119 /* Handle trailing fragments that run over the size of the struct. */
2120 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2122 if (BYTES_BIG_ENDIAN
)
2124 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2125 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2126 tmps
[i
], 0, OPTAB_WIDEN
);
2128 bytelen
= ssize
- bytepos
;
2131 if (GET_CODE (dst
) == CONCAT
)
2133 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2134 dest
= XEXP (dst
, 0);
2135 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2137 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2138 dest
= XEXP (dst
, 1);
2144 /* Optimize the access just a bit. */
2145 if (GET_CODE (dest
) == MEM
2146 && MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
)
2147 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2148 && bytelen
== GET_MODE_SIZE (mode
))
2149 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2151 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2152 mode
, tmps
[i
], ssize
);
2157 /* Copy from the pseudo into the (probable) hard reg. */
2158 if (GET_CODE (dst
) == REG
)
2159 emit_move_insn (orig_dst
, dst
);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */
2172 copy_blkmode_from_reg (tgtblk
, srcreg
, type
)
2177 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2178 rtx src
= NULL
, dst
= NULL
;
2179 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2180 unsigned HOST_WIDE_INT bitpos
, xbitpos
, big_endian_correction
= 0;
2184 tgtblk
= assign_temp (build_qualified_type (type
,
2186 | TYPE_QUAL_CONST
)),
2188 preserve_temp_slots (tgtblk
);
2191 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2192 into a new pseudo which is a full word.
2194 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2195 the wrong part of the register gets copied so we fake a type conversion
2197 if (GET_MODE (srcreg
) != BLKmode
2198 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2200 if (FUNCTION_ARG_REG_LITTLE_ENDIAN
)
2201 srcreg
= simplify_gen_subreg (word_mode
, srcreg
, GET_MODE (srcreg
), 0);
2203 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2206 /* Structures whose size is not a multiple of a word are aligned
2207 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2208 machine, this means we must skip the empty high order bytes when
2209 calculating the bit offset. */
2210 if (BYTES_BIG_ENDIAN
2211 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2212 && bytes
% UNITS_PER_WORD
)
2213 big_endian_correction
2214 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
  /* Copy the structure BITSIZE bits at a time.
2218 We could probably emit more efficient code for machines which do not use
2219 strict alignment, but it doesn't seem worth the effort at the current
2221 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2222 bitpos
< bytes
* BITS_PER_UNIT
;
2223 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2225 /* We need a new source operand each time xbitpos is on a
2226 word boundary and when xbitpos == big_endian_correction
2227 (the first time through). */
2228 if (xbitpos
% BITS_PER_WORD
== 0
2229 || xbitpos
== big_endian_correction
)
2230 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2233 /* We need a new destination operand each time bitpos is on
2235 if (bitpos
% BITS_PER_WORD
== 0)
2236 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2238 /* Use xbitpos for the source extraction (right justified) and
2239 xbitpos for the destination store (left justified). */
2240 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2241 extract_bit_field (src
, bitsize
,
2242 xbitpos
% BITS_PER_WORD
, 1,
2243 NULL_RTX
, word_mode
, word_mode
,
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */
  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = MOVE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
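/* Purely illustrative companion to the callback above (an assumption made
   for exposition; it is not something this file defines or uses): treat
   DATA as a byte buffer and hand back the bytes at OFFSET as a CONST_INT
   of MODE, ignoring endianness and modes wider than HOST_WIDE_INT.  It
   only sketches the CONSTFUN calling convention that store_by_pieces and
   can_store_by_pieces expect.  */
#if 0
static rtx
example_read_str (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  const unsigned char *p = (const unsigned char *) data + offset;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  /* Assemble GET_MODE_SIZE (mode) bytes, least significant byte first.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val |= (unsigned HOST_WIDE_INT) p[i] << (i * BITS_PER_UNIT);

  return GEN_INT (trunc_int_for_mode ((HOST_WIDE_INT) val, mode));
}
#endif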
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */
  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
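/* As a worked example: storing 7 constant bytes on a typical 32-bit target
   with adequate alignment makes store_by_pieces_1 walk SImode, HImode and
   QImode in turn, so this helper runs three times and emits one 4-byte,
   one 2-byte and one 1-byte store.  */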
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && MOVE_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else
        {
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */

          rtx opalign = GEN_INT (align / BITS_PER_UNIT);
          enum machine_mode mode;

          for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
               mode = GET_MODE_WIDER_MODE (mode))
            {
              enum insn_code code = clrstr_optab[(int) mode];
              insn_operand_predicate_fn pred;

              if (code != CODE_FOR_nothing
                  /* We don't need MODE to be narrower than
                     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
                     the mode mask, as it is returned by the macro, it will
                     definitely be less than the actual mode mask.  */
                  && ((GET_CODE (size) == CONST_INT
                       && ((unsigned HOST_WIDE_INT) INTVAL (size)
                           <= (GET_MODE_MASK (mode) >> 1)))
                      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                      || (*pred) (object, BLKmode))
                  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
                      || (*pred) (opalign, VOIDmode)))
                {
                  rtx op1;
                  rtx last = get_last_insn ();
                  rtx pat;

                  op1 = convert_to_mode (mode, size, 1);
                  pred = insn_data[(int) code].operand[1].predicate;
                  if (pred != 0 && ! (*pred) (op1, mode))
                    op1 = copy_to_mode_reg (mode, op1);

                  pat = GEN_FCN ((int) code) (object, op1, opalign);
                  if (pat)
                    {
                      emit_insn (pat);
                      return 0;
                    }
                  else
                    delete_insns_since (last);
                }
            }

          /* OBJECT or SIZE may have been passed through protect_from_queue.

             It is unsafe to save the value generated by protect_from_queue
             and reuse it later.  Consider what happens if emit_queue is
             called before the return value from protect_from_queue is used.

             Expansion of the CALL_EXPR below will call emit_queue before
             we are finished emitting RTL for argument setup.  So if we are
             not careful we could get the wrong value for an argument.

             To avoid this problem we go ahead and emit code to copy OBJECT
             and SIZE into new pseudos.  We can then place those new pseudos
             into an RTL_EXPR and use them later, even after a call to
             emit_queue.

             Note this is not strictly needed for library calls since they
             do not call emit_queue before loading their arguments.  However,
             we may need to have library calls call emit_queue in the future
             since failing to do so could cause problems for targets which
             define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

          object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
          size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
          size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                                  TREE_UNSIGNED (integer_type_node));
          size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
          /* It is incorrect to use the libcall calling conventions to call
             memset in this context.

             This could be a user call to memset and the user may wish to
             examine the return value from memset.

             For targets where libcalls and normal calls have different
             conventions for returning pointers, we could end up generating
             incorrect code.

             So instead of using a libcall sequence we build up a suitable
             CALL_EXPR and expand the call in the normal fashion.  */
          if (fn == NULL_TREE)
            {
              tree fntype;

              /* This was copied from except.c, I don't know if all this is
                 necessary in this context or not.  */
              fn = get_identifier ("memset");
              fntype = build_pointer_type (void_type_node);
              fntype = build_function_type (fntype, NULL_TREE);
              fn = build_decl (FUNCTION_DECL, fn, fntype);
              ggc_add_tree_root (&fn, 1);
              DECL_EXTERNAL (fn) = 1;
              TREE_PUBLIC (fn) = 1;
              DECL_ARTIFICIAL (fn) = 1;
              TREE_NOTHROW (fn) = 1;
              make_decl_rtl (fn, NULL);
              assemble_external (fn);
            }

          /* We need to make an argument list for the function call.

             memset has three arguments, the first is a void * address, the
             second an integer with the initialization value, the last is a
             size_t byte count for the copy.  */
          arg_list
            = build_tree_list (NULL_TREE,
                               make_tree (build_pointer_type (void_type_node),
                                          object));
          TREE_CHAIN (arg_list)
            = build_tree_list (NULL_TREE,
                               make_tree (integer_type_node, const0_rtx));
          TREE_CHAIN (TREE_CHAIN (arg_list))
            = build_tree_list (NULL_TREE, make_tree (sizetype, size));
          TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
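          /* The argument list built above corresponds to the call
             memset ((void *) OBJECT, 0, (size_t) SIZE).  */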
          /* Now we have to build up the CALL_EXPR itself.  */
          call_expr = build1 (ADDR_EXPR,
                              build_pointer_type (TREE_TYPE (fn)), fn);
          call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                             call_expr, arg_list, NULL_TREE);
          TREE_SIDE_EFFECTS (call_expr) = 1;

          retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
          emit_library_call (bzero_libfunc, LCT_NORMAL,
                             VOIDmode, 2, object, Pmode, size,
                             TYPE_MODE (integer_type_node));
#endif

          /* If we are initializing a readonly value, show the above call
             clobbered it.  Otherwise, a load from it may erroneously be
             hoisted from a loop.  */
          if (RTX_UNCHANGING_P (object))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
        }
    }

  return retval;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    {
      y_cst = y;
      y = force_const_mem (mode, y);
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
                                                    * BITS_PER_UNIT),
                                                   (class == MODE_COMPLEX_INT
                                                    ? MODE_INT : MODE_FLOAT),
                                                   0))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
      if (stack
          && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
              != GET_MODE_SIZE (submode)))
        {
          rtx temp;
          HOST_WIDE_INT offset1, offset2;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT (PUSH_ROUNDING
                                        (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
          offset1 = 0;
          offset2 = GET_MODE_SIZE (submode);
#else
          offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
          offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
                     + GET_MODE_SIZE (submode));
#endif

          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset1))),
                          gen_realpart (submode, y));
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset2))),
                          gen_imagpart (submode, y));
        }
      else
#endif
      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          rtx realpart_x, realpart_y;
          rtx imagpart_x, imagpart_y;

          /* If this is a complex value with each part being smaller than a
             word, the usual calling sequence will likely pack the pieces into
             a single register.  Unfortunately, SUBREG of hard registers only
             deals in terms of words, so we have a problem converting input
             arguments to the CONCAT of two registers that is used elsewhere
             for complex values.  If this is before reload, we can copy it into
             memory and reload.  FIXME, we should see about using extract and
             insert on integer registers, but complex short and complex char
             variables should be rarely used.  */
          if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
              && (reload_in_progress | reload_completed) == 0)
            {
              int packed_dest_p
                = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
              int packed_src_p
                = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

              if (packed_dest_p || packed_src_p)
                {
                  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                               ? MODE_FLOAT : MODE_INT);

                  enum machine_mode reg_mode
                    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

                  if (reg_mode != BLKmode)
                    {
                      rtx mem = assign_stack_temp (reg_mode,
                                                   GET_MODE_SIZE (mode), 0);
                      rtx cmem = adjust_address (mem, mode, 0);

                      cfun->cannot_inline
                        = N_("function using short complex types cannot be inline");

                      if (packed_dest_p)
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

                          emit_move_insn_1 (cmem, y);
                          return emit_move_insn_1 (sreg, mem);
                        }
                      else
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

                          emit_move_insn_1 (mem, sreg);
                          return emit_move_insn_1 (x, cmem);
                        }
                    }
                }
            }

          realpart_x = gen_realpart (submode, x);
          realpart_y = gen_realpart (submode, y);
          imagpart_x = gen_imagpart (submode, x);
          imagpart_y = gen_imagpart (submode, y);

          /* Show the output dies here.  This is necessary for SUBREGs
             of pseudos since we cannot track their lifetimes correctly;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed)
              && (GET_CODE (realpart_x) == SUBREG
                  || GET_CODE (imagpart_x) == SUBREG))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (realpart_x, realpart_y));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (imagpart_x, imagpart_y));
        }

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          rtx temp;
          enum rtx_code code;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT (PUSH_ROUNDING
                                        (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

          code = GET_CODE (XEXP (x, 0));

          /* Just hope that small offsets off SP are OK.  */
          if (code == POST_INC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (-((HOST_WIDE_INT)
                                            GET_MODE_SIZE (GET_MODE (x)))));
          else if (code == POST_DEC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          else
            temp = stack_pointer_rtx;

          x = change_address (x, VOIDmode, temp);
        }
#endif

      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = gen_sequence ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          -INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                         negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                         negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
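/* Illustration: when the stack grows downward and SIZE is a CONST_INT with
   EXTRA == 0, the block just allocated by anti_adjust_stack starts at
   virtual_outgoing_args_rtx - SIZE, which is exactly the address the
   CONST_INT branch above computes and hands to memory_address.  */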
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     enum machine_mode mode;
     rtx x;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }

  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }
= gen_rtx_MEM (mode
, dest_addr
);
3207 set_mem_attributes (dest
, type
, 1);
3209 if (flag_optimize_sibling_calls
)
3210 /* Function incoming arguments may overlap with sibling call
3211 outgoing arguments and we cannot allow reordering of reads
3212 from function arguments with stores to outgoing arguments
3213 of sibling calls. */
3214 set_mem_alias_set (dest
, 0);
3216 emit_move_insn (dest
, x
);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space,
                alignment_pad)
     rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
          goto ret;
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));
          target = gen_rtx_MEM (BLKmode, temp);

          if (type != 0)
            {
              set_mem_attributes (target, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (target, 0);
            }
          else
            set_mem_align (target, align);

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
            {
              move_by_pieces (target, xinner, INTVAL (size), align);
              goto ret;
            }
          else
            {
              rtx opalign = GEN_INT (align / BITS_PER_UNIT);
              enum machine_mode mode;

              for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                {
                  enum insn_code code = movstr_optab[(int) mode];
                  insn_operand_predicate_fn pred;

                  if (code != CODE_FOR_nothing
                      && ((GET_CODE (size) == CONST_INT
                           && ((unsigned HOST_WIDE_INT) INTVAL (size)
                               <= (GET_MODE_MASK (mode) >> 1)))
                          || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                      && (!(pred = insn_data[(int) code].operand[0].predicate)
                          || ((*pred) (target, BLKmode)))
                      && (!(pred = insn_data[(int) code].operand[1].predicate)
                          || ((*pred) (xinner, BLKmode)))
                      && (!(pred = insn_data[(int) code].operand[3].predicate)
                          || ((*pred) (opalign, VOIDmode))))
                    {
                      rtx op2 = convert_to_mode (mode, size, 1);
                      rtx last = get_last_insn ();
                      rtx pat;

                      pred = insn_data[(int) code].operand[2].predicate;
                      if (pred != 0 && ! (*pred) (op2, mode))
                        op2 = copy_to_mode_reg (mode, op2);

                      pat = GEN_FCN ((int) code) (target, xinner,
                                                  op2, opalign);
                      if (pat)
                        {
                          emit_insn (pat);
                          goto ret;
                        }
                      else
                        delete_insns_since (last);
                    }
                }
            }

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, LCT_NORMAL,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             convert_to_mode (TYPE_MODE (sizetype),
                                              size, TREE_UNSIGNED (sizetype)),
                             TYPE_MODE (sizetype));
#else
          emit_library_call (bcopy_libfunc, LCT_NORMAL,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             convert_to_mode (TYPE_MODE (integer_type_node),
                                              size,
                                              TREE_UNSIGNED (integer_type_node)),
                             TYPE_MODE (integer_type_node));
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                        args_so_far));
          target = addr;
          dest = gen_rtx_MEM (mode, addr);
          if (type != 0)
            {
              set_mem_attributes (dest, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (dest, 0);
            }

          emit_move_insn (dest, x);
        }
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, -1);  /* ??? size?  */
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

          if (GET_CODE (to_rtx) != MEM)
            abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (GET_CODE (to_rtx) == MEM
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor (offset));
        }

      if (GET_CODE (to_rtx) == MEM)
        {
          tree old_expr = MEM_EXPR (to_rtx);

          /* If the field is at offset zero, we could have been given the
             DECL_RTX of the parent struct.  Don't munge it.  */
          to_rtx = shallow_copy_rtx (to_rtx);

          set_mem_attributes (to_rtx, to, 0);

          /* If we changed MEM_EXPR, that means we're now referencing
             the COMPONENT_REF, which means that MEM_OFFSET must be
             relative to that field.  But we've not yet reflected BITPOS
             in TO_RTX.  This will be done in store_field.  Adjust for
             that by biasing MEM_OFFSET by -bitpos.  */
          if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
              && (bitpos / BITS_PER_UNIT) != 0)
            set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
                                             - (bitpos / BITS_PER_UNIT)));
        }

      /* Deal with volatile and readonly fields.  The former is only done
         for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && GET_CODE (to_rtx) == MEM)
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_VOLATILE_P (to_rtx) = 1;
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1)))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast for HPUX compiler.  */
                             ? ((enum machine_mode)
                                TYPE_MODE (TREE_TYPE (to)))
                             : VOIDmode),
                            unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from));
      else
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (POINTER_TYPE_P (TREE_TYPE (to))
              && GET_MODE (to_rtx) != GET_MODE (value))
            value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
          emit_move_insn (to_rtx, value);
        }

      preserve_temp_slots (to_rtx);
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memmove_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                         XEXP (to_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node),
                                          size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  return want_value ? result : NULL_RTX;
}
3893 /* Generate code for computing expression EXP,
3894 and storing the value into TARGET.
3895 TARGET may contain a QUEUED rtx.
3897 If WANT_VALUE is nonzero, return a copy of the value
3898 not in TARGET, so that we can be sure to use the proper
3899 value in a containing expression even if TARGET has something
3900 else stored in it. If possible, we copy the value through a pseudo
3901 and return that pseudo. Or, if the value is constant, we try to
3902 return the constant. In some cases, we return a pseudo
3903 copied *from* TARGET.
3905 If the mode is BLKmode then we may return TARGET itself.
3906 It turns out that in BLKmode it doesn't cause a problem.
3907 because C has no operators that could combine two different
3908 assignments into the same BLKmode object with different values
3909 with no sequence point. Will other languages need this to
3912 If WANT_VALUE is 0, we return NULL, to make sure
3913 to catch quickly any cases where the caller uses the value
3914 and fails to set WANT_VALUE. */
3917 store_expr (exp
, target
, want_value
)
3923 int dont_return_target
= 0;
3924 int dont_store_target
= 0;
3926 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
3928 /* Perform first part of compound expression, then assign from second
3930 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
3932 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3934 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3936 /* For conditional expression, get safe form of the target. Then
3937 test the condition, doing the appropriate assignment on either
3938 side. This avoids the creation of unnecessary temporaries.
3939 For non-BLKmode, it is more efficient not to do this. */
3941 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3944 target
= protect_from_queue (target
, 1);
3946 do_pending_stack_adjust ();
3948 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3949 start_cleanup_deferral ();
3950 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3951 end_cleanup_deferral ();
3953 emit_jump_insn (gen_jump (lab2
));
3956 start_cleanup_deferral ();
3957 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3958 end_cleanup_deferral ();
3963 return want_value
? target
: NULL_RTX
;
3965 else if (queued_subexp_p (target
))
3966 /* If target contains a postincrement, let's not risk
3967 using it as the place to generate the rhs. */
3969 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3971 /* Expand EXP into a new pseudo. */
3972 temp
= gen_reg_rtx (GET_MODE (target
));
3973 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3976 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3978 /* If target is volatile, ANSI requires accessing the value
3979 *from* the target, if it is accessed. So make that happen.
3980 In no case return the target itself. */
3981 if (! MEM_VOLATILE_P (target
) && want_value
)
3982 dont_return_target
= 1;
3984 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3985 && GET_MODE (target
) != BLKmode
)
3986 /* If target is in memory and caller wants value in a register instead,
3987 arrange that. Pass TARGET as target for expand_expr so that,
3988 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3989 We know expand_expr will not use the target in that case.
3990 Don't do this if TARGET is volatile because we are supposed
3991 to write it and then read it. */
3993 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3994 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3996 /* If TEMP is already in the desired TARGET, only copy it from
3997 memory and don't store it there again. */
3999 || (rtx_equal_p (temp
, target
)
4000 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4001 dont_store_target
= 1;
4002 temp
= copy_to_reg (temp
);
4004 dont_return_target
= 1;
4006 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4007 /* If this is an scalar in a register that is stored in a wider mode
4008 than the declared mode, compute the result into its declared mode
4009 and then convert to the wider mode. Our value is the computed
4012 rtx inner_target
= 0;
4014 /* If we don't want a value, we can do the conversion inside EXP,
4015 which will often result in some optimizations. Do the conversion
4016 in two steps: first change the signedness, if needed, then
4017 the extend. But don't do this if the type of EXP is a subtype
4018 of something else since then the conversion might involve
4019 more than just converting modes. */
4020 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4021 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4023 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4024 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4026 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4027 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4029 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4030 (GET_MODE (SUBREG_REG (target
)),
4031 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4034 inner_target
= SUBREG_REG (target
);
4037 temp
= expand_expr (exp
, inner_target
, VOIDmode
, 0);
4039 /* If TEMP is a volatile MEM and we want a result value, make
4040 the access now so it gets done only once. Likewise if
4041 it contains TARGET. */
4042 if (GET_CODE (temp
) == MEM
&& want_value
4043 && (MEM_VOLATILE_P (temp
)
4044 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
4045 temp
= copy_to_reg (temp
);
4047 /* If TEMP is a VOIDmode constant, use convert_modes to make
4048 sure that we properly convert it. */
4049 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4051 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4052 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4053 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4054 GET_MODE (target
), temp
,
4055 SUBREG_PROMOTED_UNSIGNED_P (target
));
4058 convert_move (SUBREG_REG (target
), temp
,
4059 SUBREG_PROMOTED_UNSIGNED_P (target
));
4061 /* If we promoted a constant, change the mode back down to match
4062 target. Otherwise, the caller might get confused by a result whose
4063 mode is larger than expected. */
4065 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
))
4067 if (GET_MODE (temp
) != VOIDmode
)
4069 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4070 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4071 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4072 SUBREG_PROMOTED_UNSIGNED_P (target
));
4075 temp
= convert_modes (GET_MODE (target
),
4076 GET_MODE (SUBREG_REG (target
)),
4077 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4080 return want_value
? temp
: NULL_RTX
;
4084 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4085 /* Return TARGET if it's a specified hardware register.
4086 If TARGET is a volatile mem ref, either return TARGET
4087 or return a reg copied *from* TARGET; ANSI requires this.
4089 Otherwise, if TEMP is not TARGET, return TEMP
4090 if it is constant (for efficiency),
4091 or if we really want the correct value. */
4092 if (!(target
&& GET_CODE (target
) == REG
4093 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4094 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4095 && ! rtx_equal_p (temp
, target
)
4096 && (CONSTANT_P (temp
) || want_value
))
4097 dont_return_target
= 1;
4100 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4101 the same as that of TARGET, adjust the constant. This is needed, for
4102 example, in case it is a CONST_DOUBLE and we want only a word-sized
4104 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4105 && TREE_CODE (exp
) != ERROR_MARK
4106 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4107 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4108 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
4110 /* If value was not generated in the target, store it there.
4111 Convert the value to TARGET's type first if necessary.
4112 If TEMP and TARGET compare equal according to rtx_equal_p, but
4113 one or both of them are volatile memory refs, we have to distinguish
4115 - expand_expr has used TARGET. In this case, we must not generate
4116 another copy. This can be detected by TARGET being equal according
4118 - expand_expr has not used TARGET - that means that the source just
4119 happens to have the same RTX form. Since temp will have been created
4120 by expand_expr, it will compare unequal according to == .
4121 We must generate a copy in this case, to reach the correct number
4122 of volatile memory references. */
4124 if ((! rtx_equal_p (temp
, target
)
4125 || (temp
!= target
&& (side_effects_p (temp
)
4126 || side_effects_p (target
))))
4127 && TREE_CODE (exp
) != ERROR_MARK
4128 && ! dont_store_target
4129 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4130 but TARGET is not valid memory reference, TEMP will differ
4131 from TARGET although it is really the same location. */
4132 && (TREE_CODE_CLASS (TREE_CODE (exp
)) != 'd'
4133 || target
!= DECL_RTL_IF_SET (exp
)))
4135 target
= protect_from_queue (target
, 1);
4136 if (GET_MODE (temp
) != GET_MODE (target
)
4137 && GET_MODE (temp
) != VOIDmode
)
	{
4139 	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4140 	  if (dont_return_target)
	    {
4142 	      /* In this case, we will return TEMP,
4143 		 so make sure it has the proper mode.
4144 		 But don't forget to store the value into TARGET.  */
4145 	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4146 	      emit_move_insn (target, temp);
	    }
	  else
4149 	    convert_move (target, temp, unsignedp);
	}

4152       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4154 	  /* Handle copying a string constant into an array.  The string
4155 	     constant may be shorter than the array.  So copy just the string's
4156 	     actual length, and clear the rest.  First get the size of the data
4157 	     type of the string, which is actually the size of the target.  */
4158 	  rtx size = expr_size (exp);

4160 	  if (GET_CODE (size) == CONST_INT
4161 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4162 	    emit_block_move (target, temp, size);
	  else
	    {
4165 	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
4167 		= size_binop (MIN_EXPR,
4168 			      make_tree (sizetype, size),
4169 			      size_int (TREE_STRING_LENGTH (exp)));
4170 	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);

4174 	      /* Copy that much.  */
4175 	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4176 	      emit_block_move (target, temp, copy_size_rtx);
4178 	      /* Figure out how much is left in TARGET that we have to clear.
4179 		 Do all calculations in ptr_mode.  */
4180 	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
4182 		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4183 		  target = adjust_address (target, BLKmode,
4184 					   INTVAL (copy_size_rtx));
		}
	      else
		{
4188 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4189 				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);
4192 #ifdef POINTERS_EXTEND_UNSIGNED
4193 		  if (GET_MODE (copy_size_rtx) != Pmode)
4194 		    copy_size_rtx = convert_memory_address (Pmode,
							     copy_size_rtx);
#endif

4198 		  target = offset_address (target, copy_size_rtx,
4199 					   highest_pow2_factor (copy_size));
4200 		  label = gen_label_rtx ();
4201 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4202 					   GET_MODE (size), 0, label);
		}

4205 	      if (size != const0_rtx)
4206 		clear_storage (target, size);
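
/* Illustrative aside (not part of this file): at the C library level the
   transformation above amounts to "copy the shorter of string and array,
   then zero the remainder".  A minimal sketch under that assumption; the
   names init_array_from_string, dst and str are hypothetical, and the block
   is kept under #if 0 so it is not compiled.  */
#if 0
#include <string.h>

static void
init_array_from_string (char *dst, size_t dst_size,
			const char *str, size_t str_len)
{
  size_t copy = str_len < dst_size ? str_len : dst_size;

  memcpy (dst, str, copy);	/* copy only the string's actual length */
  if (copy < dst_size)
    memset (dst + copy, 0, dst_size - copy);	/* clear what is left of the target */
}
#endif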
	}
4212       /* Handle calls that return values in multiple non-contiguous locations.
4213 	 The Irix 6 ABI has examples of this.  */
4214       else if (GET_CODE (target) == PARALLEL)
4215 	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4216       else if (GET_MODE (temp) == BLKmode)
4217 	emit_block_move (target, temp, expr_size (exp));
      else
4219 	emit_move_insn (target, temp);
    }
4222   /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

4226   /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4227      ??? The latter test doesn't seem to make sense.  */
4228   else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

4231   /* Return TARGET itself if it is a hard register.  */
4232   else if (want_value && GET_MODE (target) != BLKmode
4233 	   && ! (GET_CODE (target) == REG
4234 		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4235     return copy_to_reg (target);
  else
    return target;
}
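
/* Illustrative aside (hypothetical caller, not from this file): the
   want_value argument decides whether store_expr must hand back an rtx for
   the stored value or may return NULL_RTX, as the returns above show.
   lhs_rtx and rhs are assumed names.  */
#if 0
  /* Assignment whose value is used by an enclosing expression.  */
  rtx val = store_expr (rhs, lhs_rtx, /* want_value */ 1);

  /* Statement-level assignment; the value is not needed.  */
  store_expr (rhs, lhs_rtx, /* want_value */ 0);
#endif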
4241 /* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

4249   switch (TREE_CODE (exp))
    {
4253     case NON_LVALUE_EXPR:
4254     case VIEW_CONVERT_EXPR:
4255       return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
4258       return integer_zerop (exp);

    case COMPLEX_CST:
      return
4262 	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
4265       return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
4268       for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4269 	   elt = TREE_CHAIN (elt))
4270 	if (!is_zeros_p (TREE_VALUE (elt)))
	  return 0;

    case CONSTRUCTOR:
4276       if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4277 	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4278       for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4279 	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;
4289 /* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
4292 mostly_zeros_p (exp)
     tree exp;
{
4295   if (TREE_CODE (exp) == CONSTRUCTOR)
    {
4297       int elts = 0, zeros = 0;
4298       tree elt = CONSTRUCTOR_ELTS (exp);
4299       if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4301 	/* If there are no ranges of true bits, it is all zero.  */
4302 	return elt == NULL_TREE;

4304       for (; elt; elt = TREE_CHAIN (elt))
	{
4306 	  /* We do not handle the case where the index is a RANGE_EXPR,
4307 	     so the statistic will be somewhat inaccurate.
4308 	     We do make a more accurate count in store_constructor itself,
4309 	     and since this function is only used for nested array elements,
4310 	     this should be close enough.  */
4311 	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

4316       return 4 * zeros >= 3 * elts;
    }

4319   return is_zeros_p (exp);
}
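
/* Illustrative aside (not compiler code): "4 * zeros >= 3 * elts" is the
   integer-only way of asking whether at least three quarters of the
   elements are zero.  A standalone sketch over a plain int array, with
   hypothetical names, kept under #if 0:  */
#if 0
static int
mostly_zero_ints (const int *v, int n)
{
  int zeros = 0, i;

  for (i = 0; i < n; i++)
    if (v[i] == 0)
      zeros++;

  return 4 * zeros >= 3 * n;	/* zeros/n >= 3/4 without floating point */
}
#endif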
4322 /* Helper function for store_constructor.
4323 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4324 TYPE is the type of the CONSTRUCTOR, not the element type.
4325 CLEARED is as for store_constructor.
4326 ALIAS_SET is the alias set to use for any stores.
4328 This provides a recursive shortcut back to store_constructor when it isn't
4329 necessary to go through store_field. This is so that we can pass through
4330 the cleared field to let store_constructor know that we may not have to
4331 clear a substructure if the outer structure has already been cleared. */
4334 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
			 alias_set)
     rtx target;
4337      unsigned HOST_WIDE_INT bitsize;
4338      HOST_WIDE_INT bitpos;
4339      enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
4344   if (TREE_CODE (exp) == CONSTRUCTOR
4345       && bitpos % BITS_PER_UNIT == 0
4346       /* If we have a non-zero bitpos for a register target, then we just
4347 	 let store_field do the bitfield handling.  This is unlikely to
4348 	 generate unnecessary clear instructions anyways.  */
4349       && (bitpos == 0 || GET_CODE (target) == MEM))
    {
4351       if (GET_CODE (target) == MEM)
	target
4353 	  = adjust_address (target,
4354 			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
4356 				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4357 			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

4360       /* Update the alias set, if required.  */
4361       if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4362 	  && MEM_ALIAS_SET (target) != 0)
	{
4364 	  target = copy_rtx (target);
4365 	  set_mem_alias_set (target, alias_set);
	}

4368       store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
4371     store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		 alias_set);
}
4375 /* Store the value of constructor EXP into the rtx TARGET.
4376 TARGET is either a REG or a MEM; we know it cannot conflict, since
4377 safe_from_p has been called.
4378 CLEARED is true if TARGET is known to have been zero'd.
4379 SIZE is the number of bytes of TARGET we are allowed to modify: this
4380 may not be the same as the size of EXP if we are assigning to a field
4381 which has been packed to exclude padding bits. */
4384 store_constructor (exp, target, cleared, size)
     tree exp;
     rtx target;
     int cleared;
     HOST_WIDE_INT size;
{
4390   tree type = TREE_TYPE (exp);
4391 #ifdef WORD_REGISTER_OPERATIONS
4392   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

4395   if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4396       || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
4400       /* We either clear the aggregate or indicate the value is dead.  */
4401       if ((TREE_CODE (type) == UNION_TYPE
4402 	   || TREE_CODE (type) == QUAL_UNION_TYPE)
4404 	  && ! CONSTRUCTOR_ELTS (exp))
4405 	/* If the constructor is empty, clear the union.  */
	{
4407 	  clear_storage (target, expr_size (exp));
	  cleared = 1;
	}

4411       /* If we are building a static constructor into a register,
4412 	 set the initial value as zero so we can fold the value into
4413 	 a constant.  But if more than one register is involved,
4414 	 this probably loses.  */
4415       else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4416 	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
4418 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}
4422 /* If the constructor has fewer fields than the structure
4423 or if we are initializing the structure to mostly zeros,
4424 clear the whole structure first. Don't do this if TARGET is a
4425 register whose mode size isn't equal to SIZE since clear_storage
4426 can't handle this case. */
4427       else if (! cleared && size > 0
4428 	       && ((list_length (CONSTRUCTOR_ELTS (exp))
4429 		    != fields_length (type))
4430 		   || mostly_zeros_p (exp))
4431 	       && (GET_CODE (target) != REG
4432 		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
4435 	  clear_storage (target, GEN_INT (size));
	  cleared = 1;
	}

      if (! cleared)
4440 	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4442 /* Store each element of the constructor into
4443 the corresponding field of TARGET. */
4445       for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4447 	  tree field = TREE_PURPOSE (elt);
4448 	  tree value = TREE_VALUE (elt);
4449 	  enum machine_mode mode;
4450 	  HOST_WIDE_INT bitsize;
4451 	  HOST_WIDE_INT bitpos = 0;
4454 	  rtx to_rtx = target;
4456 	  /* Just ignore missing fields.
4457 	     We cleared the whole structure, above,
4458 	     if any fields are missing.  */
4462 	  if (cleared && is_zeros_p (value))
	    continue;

4465 	  if (host_integerp (DECL_SIZE (field), 1))
4466 	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;

4470 	  unsignedp = TREE_UNSIGNED (field);
4471 	  mode = DECL_MODE (field);
4472 	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

4475 	  offset = DECL_FIELD_OFFSET (field);
4476 	  if (host_integerp (offset, 0)
4477 	      && host_integerp (bit_position (field), 0))
	    {
4479 	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
4483 	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4489 	      if (contains_placeholder_p (offset))
4490 		offset = build (WITH_RECORD_EXPR, sizetype,
4491 				offset, make_tree (TREE_TYPE (exp), target));

4493 	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4494 	      if (GET_CODE (to_rtx) != MEM)
		abort ();

4497 #ifdef POINTERS_EXTEND_UNSIGNED
4498 	      if (GET_MODE (offset_rtx) != Pmode)
4499 		offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
4501 	      if (GET_MODE (offset_rtx) != ptr_mode)
4502 		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

4505 	      to_rtx = offset_address (to_rtx, offset_rtx,
4506 				       highest_pow2_factor (offset));

4509 	  if (TREE_READONLY (field))
	    {
4511 	      if (GET_CODE (to_rtx) == MEM)
4512 		to_rtx = copy_rtx (to_rtx);

4514 	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }
4517 #ifdef WORD_REGISTER_OPERATIONS
4518 /* If this initializes a field that is smaller than a word, at the
4519 start of a word, try to widen it to a full word.
4520 This special case allows us to output C++ member function
4521 initializations in a form that the optimizers can understand. */
4522 	  if (GET_CODE (target) == REG
4523 	      && bitsize < BITS_PER_WORD
4524 	      && bitpos % BITS_PER_WORD == 0
4525 	      && GET_MODE_CLASS (mode) == MODE_INT
4526 	      && TREE_CODE (value) == INTEGER_CST
4528 	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	    {
4530 	      tree type = TREE_TYPE (value);

4532 	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
4534 		  type = (*lang_hooks.types.type_for_size)
4535 		    (BITS_PER_WORD, TREE_UNSIGNED (type));
4536 		  value = convert (type, value);
		}

4539 	      if (BYTES_BIG_ENDIAN)
		value
4541 		  = fold (build (LSHIFT_EXPR, type, value,
4542 				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4543 	      bitsize = BITS_PER_WORD;
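
/* Illustrative aside (not compiler code): the widening above places a
   sub-word field within a full word.  On a big-endian target the field
   occupies the most significant bits, so the value is shifted left by
   (word size - field size); on little-endian it already sits in the low
   bits.  A plain C sketch with hypothetical names, assuming
   0 < bitsize <= word_bits <= bits in an unsigned long:  */
#if 0
static unsigned long
widen_field_to_word (unsigned long value, int bitsize,
		     int word_bits, int bytes_big_endian)
{
  if (bytes_big_endian)
    value <<= word_bits - bitsize;	/* move the field to the high end of the word */
  return value;
}
#endif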
4548 	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4549 	      && DECL_NONADDRESSABLE_P (field))
	    {
4551 	      to_rtx = copy_rtx (to_rtx);
4552 	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

4555 	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
4556 				   value, type, cleared,
4557 				   get_alias_set (TREE_TYPE (field)));
4560 else if (TREE_CODE (type
) == ARRAY_TYPE
4561 || TREE_CODE (type
) == VECTOR_TYPE
)
4566 tree domain
= TYPE_DOMAIN (type
);
4567 tree elttype
= TREE_TYPE (type
);
4569 HOST_WIDE_INT minelt
= 0;
4570 HOST_WIDE_INT maxelt
= 0;
4572 /* Vectors are like arrays, but the domain is stored via an array
4574 if (TREE_CODE (type
) == VECTOR_TYPE
)
4576 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4577 the same field as TYPE_DOMAIN, we are not guaranteed that
4579 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
4580 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4583 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4584 && TYPE_MAX_VALUE (domain
)
4585 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4586 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4588 /* If we have constant bounds for the range of the type, get them. */
4591 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4592 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4595 /* If the constructor has fewer elements than the array,
4596 clear the whole array first. Similarly if this is
4597 static constructor of a non-BLKmode object. */
4598 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4602 HOST_WIDE_INT count
= 0, zero_count
= 0;
4603 need_to_clear
= ! const_bounds_p
;
4605 /* This loop is a more accurate version of the loop in
4606 mostly_zeros_p (it handles RANGE_EXPR in an index).
4607 It is also needed to check for missing elements. */
4608 for (elt
= CONSTRUCTOR_ELTS (exp
);
4609 elt
!= NULL_TREE
&& ! need_to_clear
;
4610 elt
= TREE_CHAIN (elt
))
4612 tree index
= TREE_PURPOSE (elt
);
4613 HOST_WIDE_INT this_node_count
;
4615 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4617 tree lo_index
= TREE_OPERAND (index
, 0);
4618 tree hi_index
= TREE_OPERAND (index
, 1);
4620 if (! host_integerp (lo_index
, 1)
4621 || ! host_integerp (hi_index
, 1))
4627 this_node_count
= (tree_low_cst (hi_index
, 1)
4628 - tree_low_cst (lo_index
, 1) + 1);
4631 this_node_count
= 1;
4633 count
+= this_node_count
;
4634 if (mostly_zeros_p (TREE_VALUE (elt
)))
4635 zero_count
+= this_node_count
;
4638 /* Clear the entire array first if there are any missing elements,
4639 or if the incidence of zero elements is >= 75%. */
4641 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4645 if (need_to_clear
&& size
> 0)
4650 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4652 clear_storage (target
, GEN_INT (size
));
4656 else if (REG_P (target
))
4657 /* Inform later passes that the old value is dead. */
4658 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4660 /* Store each element of the constructor into
4661 the corresponding element of TARGET, determined
4662 by counting the elements. */
4663 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4665 elt
= TREE_CHAIN (elt
), i
++)
4667 enum machine_mode mode
;
4668 HOST_WIDE_INT bitsize
;
4669 HOST_WIDE_INT bitpos
;
4671 tree value
= TREE_VALUE (elt
);
4672 tree index
= TREE_PURPOSE (elt
);
4673 rtx xtarget
= target
;
4675 if (cleared
&& is_zeros_p (value
))
4678 unsignedp
= TREE_UNSIGNED (elttype
);
4679 mode
= TYPE_MODE (elttype
);
4680 if (mode
== BLKmode
)
4681 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4682 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4685 bitsize
= GET_MODE_BITSIZE (mode
);
4687 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4689 tree lo_index
= TREE_OPERAND (index
, 0);
4690 tree hi_index
= TREE_OPERAND (index
, 1);
4691 rtx index_r
, pos_rtx
, hi_r
, loop_top
, loop_end
;
4692 struct nesting
*loop
;
4693 HOST_WIDE_INT lo
, hi
, count
;
4696 /* If the range is constant and "small", unroll the loop. */
4698 && host_integerp (lo_index
, 0)
4699 && host_integerp (hi_index
, 0)
4700 && (lo
= tree_low_cst (lo_index
, 0),
4701 hi
= tree_low_cst (hi_index
, 0),
4702 count
= hi
- lo
+ 1,
4703 (GET_CODE (target
) != MEM
4705 || (host_integerp (TYPE_SIZE (elttype
), 1)
4706 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4709 lo
-= minelt
; hi
-= minelt
;
4710 for (; lo
<= hi
; lo
++)
4712 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4714 if (GET_CODE (target
) == MEM
4715 && !MEM_KEEP_ALIAS_SET_P (target
)
4716 && TREE_CODE (type
) == ARRAY_TYPE
4717 && TYPE_NONALIASED_COMPONENT (type
))
4719 target
= copy_rtx (target
);
4720 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4723 store_constructor_field
4724 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4725 get_alias_set (elttype
));
4730 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4731 loop_top
= gen_label_rtx ();
4732 loop_end
= gen_label_rtx ();
4734 unsignedp
= TREE_UNSIGNED (domain
);
4736 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4739 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4741 SET_DECL_RTL (index
, index_r
);
4742 if (TREE_CODE (value
) == SAVE_EXPR
4743 && SAVE_EXPR_RTL (value
) == 0)
4745 /* Make sure value gets expanded once before the
4747 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4750 store_expr (lo_index
, index_r
, 0);
4751 loop
= expand_start_loop (0);
4753 /* Assign value to element index. */
4755 = convert (ssizetype
,
4756 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4757 index
, TYPE_MIN_VALUE (domain
))));
4758 position
= size_binop (MULT_EXPR
, position
,
4760 TYPE_SIZE_UNIT (elttype
)));
4762 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4763 xtarget
= offset_address (target
, pos_rtx
,
4764 highest_pow2_factor (position
));
4765 xtarget
= adjust_address (xtarget
, mode
, 0);
4766 if (TREE_CODE (value
) == CONSTRUCTOR
)
4767 store_constructor (value
, xtarget
, cleared
,
4768 bitsize
/ BITS_PER_UNIT
);
4770 store_expr (value
, xtarget
, 0);
4772 expand_exit_loop_if_false (loop
,
4773 build (LT_EXPR
, integer_type_node
,
4776 expand_increment (build (PREINCREMENT_EXPR
,
4778 index
, integer_one_node
), 0, 0);
4780 emit_label (loop_end
);
4783 else if ((index
!= 0 && ! host_integerp (index
, 0))
4784 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4789 index
= ssize_int (1);
4792 index
= convert (ssizetype
,
4793 fold (build (MINUS_EXPR
, index
,
4794 TYPE_MIN_VALUE (domain
))));
4796 position
= size_binop (MULT_EXPR
, index
,
4798 TYPE_SIZE_UNIT (elttype
)));
4799 xtarget
= offset_address (target
,
4800 expand_expr (position
, 0, VOIDmode
, 0),
4801 highest_pow2_factor (position
));
4802 xtarget
= adjust_address (xtarget
, mode
, 0);
4803 store_expr (value
, xtarget
, 0);
4808 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4809 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4811 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4813 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
4814 && TREE_CODE (type
) == ARRAY_TYPE
4815 && TYPE_NONALIASED_COMPONENT (type
))
4817 target
= copy_rtx (target
);
4818 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4821 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4822 type
, cleared
, get_alias_set (elttype
));
4828 /* Set constructor assignments. */
4829 else if (TREE_CODE (type
) == SET_TYPE
)
4831 tree elt
= CONSTRUCTOR_ELTS (exp
);
4832 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4833 tree domain
= TYPE_DOMAIN (type
);
4834 tree domain_min
, domain_max
, bitlength
;
4836 /* The default implementation strategy is to extract the constant
4837 parts of the constructor, use that to initialize the target,
4838 and then "or" in whatever non-constant ranges we need in addition.
4840 If a large set is all zero or all ones, it is
4841 probably better to set it using memset (if available) or bzero.
4842 Also, if a large set has just a single range, it may also be
4843 better to first clear the whole set (using
4844 bzero/memset), and set the bits we want. */
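
/* Illustrative aside (not compiler code): the constant part of a set is
   assembled word by word; a true bit of the set becomes bit
   (set_word_size - 1 - bit_pos) of the current word on big-endian targets
   and bit bit_pos on little-endian ones, as the loop below does.  A
   byte-granular sketch with hypothetical names:  */
#if 0
static void
fill_bit_buffer (unsigned char *bytes, const unsigned char *bits,
		 unsigned int nbits, int big_endian)
{
  unsigned int i;

  for (i = 0; i < nbits; i++)
    if (bits[i])
      {
	unsigned int byte = i / 8, pos = i % 8;

	bytes[byte] |= 1u << (big_endian ? 7 - pos : pos);
      }
}
#endif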
4846 /* Check for all zeros. */
4847 if (elt
== NULL_TREE
&& size
> 0)
4850 clear_storage (target
, GEN_INT (size
));
4854 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4855 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4856 bitlength
= size_binop (PLUS_EXPR
,
4857 size_diffop (domain_max
, domain_min
),
4860 nbits
= tree_low_cst (bitlength
, 1);
4862 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4863 are "complicated" (more than one range), initialize (the
4864 constant parts) by copying from a constant. */
4865 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4866 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4868 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4869 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4870 char *bit_buffer
= (char *) alloca (nbits
);
4871 HOST_WIDE_INT word
= 0;
4872 unsigned int bit_pos
= 0;
4873 unsigned int ibit
= 0;
4874 unsigned int offset
= 0; /* In bytes from beginning of set. */
4876 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4879 if (bit_buffer
[ibit
])
4881 if (BYTES_BIG_ENDIAN
)
4882 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4884 word
|= 1 << bit_pos
;
4888 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4890 if (word
!= 0 || ! cleared
)
4892 rtx datum
= GEN_INT (word
);
4895 /* The assumption here is that it is safe to use
4896 XEXP if the set is multi-word, but not if
4897 it's single-word. */
4898 if (GET_CODE (target
) == MEM
)
4899 to_rtx
= adjust_address (target
, mode
, offset
);
4900 else if (offset
== 0)
4904 emit_move_insn (to_rtx
, datum
);
4911 offset
+= set_word_size
/ BITS_PER_UNIT
;
4916 /* Don't bother clearing storage if the set is all ones. */
4917 if (TREE_CHAIN (elt
) != NULL_TREE
4918 || (TREE_PURPOSE (elt
) == NULL_TREE
4920 : ( ! host_integerp (TREE_VALUE (elt
), 0)
4921 || ! host_integerp (TREE_PURPOSE (elt
), 0)
4922 || (tree_low_cst (TREE_VALUE (elt
), 0)
4923 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
4924 != (HOST_WIDE_INT
) nbits
))))
4925 clear_storage (target
, expr_size (exp
));
4927 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
4929 /* Start of range of element or NULL. */
4930 tree startbit
= TREE_PURPOSE (elt
);
4931 /* End of range of element, or element value. */
4932 tree endbit
= TREE_VALUE (elt
);
4933 #ifdef TARGET_MEM_FUNCTIONS
4934 HOST_WIDE_INT startb
, endb
;
4936 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
4938 bitlength_rtx
= expand_expr (bitlength
,
4939 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
4941 /* Handle non-range tuple element like [ expr ]. */
4942 if (startbit
== NULL_TREE
)
4944 startbit
= save_expr (endbit
);
4948 startbit
= convert (sizetype
, startbit
);
4949 endbit
= convert (sizetype
, endbit
);
4950 if (! integer_zerop (domain_min
))
4952 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
4953 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
4955 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
4956 EXPAND_CONST_ADDRESS
);
4957 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
4958 EXPAND_CONST_ADDRESS
);
4964 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
4965 (GET_MODE (target
), 0),
4968 emit_move_insn (targetx
, target
);
4971 else if (GET_CODE (target
) == MEM
)
4976 #ifdef TARGET_MEM_FUNCTIONS
4977 /* Optimization: If startbit and endbit are
4978 constants divisible by BITS_PER_UNIT,
4979 call memset instead. */
4980 if (TREE_CODE (startbit
) == INTEGER_CST
4981 && TREE_CODE (endbit
) == INTEGER_CST
4982 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
4983 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
4985 emit_library_call (memset_libfunc
, LCT_NORMAL
,
4987 plus_constant (XEXP (targetx
, 0),
4988 startb
/ BITS_PER_UNIT
),
4990 constm1_rtx
, TYPE_MODE (integer_type_node
),
4991 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
4992 TYPE_MODE (sizetype
));
4996 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
4997 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
4998 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
4999 startbit_rtx
, TYPE_MODE (sizetype
),
5000 endbit_rtx
, TYPE_MODE (sizetype
));
5003 emit_move_insn (target
, targetx
);
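
/* Illustrative aside (not compiler code): the memset optimization above
   applies when a range of set bits starts and ends on byte boundaries, so
   whole bytes can be filled at once instead of setting bits one at a time.
   A plain C sketch, assuming <string.h> and hypothetical names:  */
#if 0
#include <string.h>

static void
set_bit_range_bytewise (unsigned char *set, long startbit, long endbit)
{
  if (startbit % 8 == 0 && (endbit + 1) % 8 == 0)
    memset (set + startbit / 8, 0xff, (endbit + 1 - startbit) / 8);
  /* Otherwise a bit-by-bit fallback is needed.  */
}
#endif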
5011 /* Store the value of EXP (an expression tree)
5012 into a subfield of TARGET which has mode MODE and occupies
5013 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5014 If MODE is VOIDmode, it means that we are storing into a bit-field.
5016 If VALUE_MODE is VOIDmode, return nothing in particular.
5017 UNSIGNEDP is not used in this case.
5019 Otherwise, return an rtx for the value stored. This rtx
5020 has mode VALUE_MODE if that is convenient to do.
5021 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5023 TYPE is the type of the underlying object,
5025 ALIAS_SET is the alias set for the destination. This value will
5026 (in general) be different from that for TARGET, since TARGET is a
5027 reference to the containing structure. */
5030 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
, unsignedp
, type
,
5033 HOST_WIDE_INT bitsize
;
5034 HOST_WIDE_INT bitpos
;
5035 enum machine_mode mode
;
5037 enum machine_mode value_mode
;
5042 HOST_WIDE_INT width_mask
= 0;
5044 if (TREE_CODE (exp
) == ERROR_MARK
)
5047 /* If we have nothing to store, do nothing unless the expression has
5050 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5051 else if (bitsize
>=0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5052 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5054 /* If we are storing into an unaligned field of an aligned union that is
5055 in a register, we may have the mode of TARGET being an integer mode but
5056 MODE == BLKmode. In that case, get an aligned object whose size and
5057 alignment are the same as TARGET and store TARGET into it (we can avoid
5058 the store if the field being stored is the entire width of TARGET). Then
5059 call ourselves recursively to store the field into a BLKmode version of
5060 that object. Finally, load from the object into TARGET. This is not
5061 very efficient in general, but should only be slightly more expensive
5062 than the otherwise-required unaligned accesses. Perhaps this can be
5063 cleaned up later. */
5066 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5070 (build_qualified_type (type
, TYPE_QUALS (type
) | TYPE_QUAL_CONST
),
5072 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5074 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5075 emit_move_insn (object
, target
);
5077 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5080 emit_move_insn (target
, object
);
5082 /* We want to return the BLKmode version of the data. */
5086 if (GET_CODE (target
) == CONCAT
)
5088 /* We're storing into a struct containing a single __complex. */
5092 return store_expr (exp
, target
, 0);
5095 /* If the structure is in a register or if the component
5096 is a bit field, we cannot use addressing to access it.
5097 Use bit-field techniques or SUBREG to store in it. */
5099 if (mode
== VOIDmode
5100 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5101 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5102 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5103 || GET_CODE (target
) == REG
5104 || GET_CODE (target
) == SUBREG
5105 /* If the field isn't aligned enough to store as an ordinary memref,
5106 store it as a bit field. */
5107 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
5108 && (MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
)
5109 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
5110 /* If the RHS and field are a constant size and the size of the
5111 RHS isn't the same size as the bitfield, we must use bitfield
5114 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5115 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5117 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5119 /* If BITSIZE is narrower than the size of the type of EXP
5120 we will be narrowing TEMP. Normally, what's wanted are the
5121 low-order bits. However, if EXP's type is a record and this is
5122 big-endian machine, we want the upper BITSIZE bits. */
5123 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5124 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5125 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5126 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5127 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5131 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5133 if (mode
!= VOIDmode
&& mode
!= BLKmode
5134 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5135 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5137 /* If the modes of TARGET and TEMP are both BLKmode, both
5138 must be in memory and BITPOS must be aligned on a byte
5139 boundary. If so, we simply do a block copy. */
5140 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5142 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5143 || bitpos
% BITS_PER_UNIT
!= 0)
5146 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5147 emit_block_move (target
, temp
,
5148 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5151 return value_mode
== VOIDmode
? const0_rtx
: target
;
5154 /* Store the value in the bitfield. */
5155 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5156 int_size_in_bytes (type
));
5158 if (value_mode
!= VOIDmode
)
5160 /* The caller wants an rtx for the value.
5161 If possible, avoid refetching from the bitfield itself. */
5163 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5166 enum machine_mode tmode
;
5168 tmode
= GET_MODE (temp
);
5169 if (tmode
== VOIDmode
)
5173 return expand_and (tmode
, temp
,
5174 gen_int_mode (width_mask
, tmode
),
5177 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5178 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5179 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5182 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5183 NULL_RTX
, value_mode
, VOIDmode
,
5184 int_size_in_bytes (type
));
5190 rtx addr
= XEXP (target
, 0);
5191 rtx to_rtx
= target
;
5193 /* If a value is wanted, it must be the lhs;
5194 so make the address stable for multiple use. */
5196 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5197 && ! CONSTANT_ADDRESS_P (addr
)
5198 /* A frame-pointer reference is already stable. */
5199 && ! (GET_CODE (addr
) == PLUS
5200 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5201 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5202 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5203 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5205 /* Now build a reference to just the desired component. */
5207 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5209 if (to_rtx
== target
)
5210 to_rtx
= copy_rtx (to_rtx
);
5212 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5213 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5214 set_mem_alias_set (to_rtx
, alias_set
);
5216 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
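
/* Illustrative aside (not compiler code): storing into a bit-field that
   cannot be addressed directly is conceptually a masked read-modify-write
   of the containing word.  A sketch with little-endian bit numbering and
   hypothetical names, assuming 0 < bitsize < 32:  */
#if 0
static unsigned int
store_bits (unsigned int word, unsigned int value, int bitpos, int bitsize)
{
  unsigned int mask = ((1u << bitsize) - 1) << bitpos;

  return (word & ~mask) | ((value << bitpos) & mask);	/* keep bits outside the field */
}
#endif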
5220 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5221 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5222 codes and find the ultimate containing object, which we return.
5224 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5225 bit position, and *PUNSIGNEDP to the signedness of the field.
5226 If the position of the field is variable, we store a tree
5227 giving the variable offset (in units) in *POFFSET.
5228 This offset is in addition to the bit position.
5229 If the position is not variable, we store 0 in *POFFSET.
5231 If any of the extraction expressions is volatile,
5232 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5234 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5235 is a mode that can be used to access the field. In that case, *PBITSIZE
5238 If the field describes a variable-sized object, *PMODE is set to
5239 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5240 this case, but the address of the object can be found. */
5243 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5244 punsignedp
, pvolatilep
)
5246 HOST_WIDE_INT
*pbitsize
;
5247 HOST_WIDE_INT
*pbitpos
;
5249 enum machine_mode
*pmode
;
5254 enum machine_mode mode
= VOIDmode
;
5255 tree offset
= size_zero_node
;
5256 tree bit_offset
= bitsize_zero_node
;
5257 tree placeholder_ptr
= 0;
5260 /* First get the mode, signedness, and size. We do this from just the
5261 outermost expression. */
5262 if (TREE_CODE (exp
) == COMPONENT_REF
)
5264 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5265 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5266 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5268 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5270 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5272 size_tree
= TREE_OPERAND (exp
, 1);
5273 *punsignedp
= TREE_UNSIGNED (exp
);
5277 mode
= TYPE_MODE (TREE_TYPE (exp
));
5278 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5280 if (mode
== BLKmode
)
5281 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5283 *pbitsize
= GET_MODE_BITSIZE (mode
);
5288 if (! host_integerp (size_tree
, 1))
5289 mode
= BLKmode
, *pbitsize
= -1;
5291 *pbitsize
= tree_low_cst (size_tree
, 1);
5294 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5295 and find the ultimate containing object. */
5298 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5299 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5300 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5302 tree field
= TREE_OPERAND (exp
, 1);
5303 tree this_offset
= DECL_FIELD_OFFSET (field
);
5305 /* If this field hasn't been filled in yet, don't go
5306 past it. This should only happen when folding expressions
5307 made during type construction. */
5308 if (this_offset
== 0)
5310 else if (! TREE_CONSTANT (this_offset
)
5311 && contains_placeholder_p (this_offset
))
5312 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5314 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5315 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5316 DECL_FIELD_BIT_OFFSET (field
));
5318 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5321 else if (TREE_CODE (exp
) == ARRAY_REF
5322 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5324 tree index
= TREE_OPERAND (exp
, 1);
5325 tree array
= TREE_OPERAND (exp
, 0);
5326 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5327 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5328 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5330 /* We assume all arrays have sizes that are a multiple of a byte.
5331 First subtract the lower bound, if any, in the type of the
5332      index, then convert to sizetype and multiply by the size of the
     array element.  */
5334 	  if (low_bound != 0 && ! integer_zerop (low_bound))
5335 	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5338 /* If the index has a self-referential type, pass it to a
5339 WITH_RECORD_EXPR; if the component size is, pass our
5340 component to one. */
5341 if (! TREE_CONSTANT (index
)
5342 && contains_placeholder_p (index
))
5343 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5344 if (! TREE_CONSTANT (unit_size
)
5345 && contains_placeholder_p (unit_size
))
5346 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5348 offset
= size_binop (PLUS_EXPR
, offset
,
5349 size_binop (MULT_EXPR
,
5350 convert (sizetype
, index
),
5354 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5356 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5358 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5359 We might have been called from tree optimization where we
5360 haven't set up an object yet. */
5368 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5369 && TREE_CODE (exp
) != VIEW_CONVERT_EXPR
5370 && ! ((TREE_CODE (exp
) == NOP_EXPR
5371 || TREE_CODE (exp
) == CONVERT_EXPR
)
5372 && (TYPE_MODE (TREE_TYPE (exp
))
5373 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5376 /* If any reference in the chain is volatile, the effect is volatile. */
5377 if (TREE_THIS_VOLATILE (exp
))
5380 exp
= TREE_OPERAND (exp
, 0);
5383 /* If OFFSET is constant, see if we can return the whole thing as a
5384 constant bit position. Otherwise, split it up. */
5385 if (host_integerp (offset
, 0)
5386 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5388 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5389 && host_integerp (tem
, 0))
5390 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5392 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
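
/* Illustrative aside (not compiler code): for a reference such as s.a[i].b,
   the walk above peels COMPONENT_REFs and ARRAY_REFs and reports the
   containing object plus a bit position (and a separate variable offset
   when the index is not constant).  The constant-index bookkeeping in plain
   C, with hypothetical struct types:  */
#if 0
#include <stddef.h>

struct inner { int b; };
struct outer { char pad; struct inner a[4]; };

static size_t
bit_position_of_a_i_b (size_t i)	/* bit position of s.a[i].b within s */
{
  return (offsetof (struct outer, a)
	  + i * sizeof (struct inner)
	  + offsetof (struct inner, b)) * 8;
}
#endif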
5398 /* Return 1 if T is an expression that get_inner_reference handles. */
5401 handled_component_p (t
)
5404 switch (TREE_CODE (t
))
5409 case ARRAY_RANGE_REF
:
5410 case NON_LVALUE_EXPR
:
5411 case VIEW_CONVERT_EXPR
:
5416 return (TYPE_MODE (TREE_TYPE (t
))
5417 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
5424 /* Given an rtx VALUE that may contain additions and multiplications, return
5425 an equivalent value that just refers to a register, memory, or constant.
5426 This is done by generating instructions to perform the arithmetic and
5427 returning a pseudo-register containing the value.
5429 The returned value may be a REG, SUBREG, MEM or constant. */
5432 force_operand (value
, target
)
5436 /* Use a temporary to force order of execution of calls to
5440 /* Use subtarget as the target for operand 0 of a binary operation. */
5441 rtx subtarget
= get_subtarget (target
);
5443 /* Check for a PIC address load. */
5444 if ((GET_CODE (value
) == PLUS
|| GET_CODE (value
) == MINUS
)
5445 && XEXP (value
, 0) == pic_offset_table_rtx
5446 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5447 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5448 || GET_CODE (XEXP (value
, 1)) == CONST
))
5451 subtarget
= gen_reg_rtx (GET_MODE (value
));
5452 emit_move_insn (subtarget
, value
);
5456 if (GET_CODE (value
) == PLUS
)
5457 binoptab
= add_optab
;
5458 else if (GET_CODE (value
) == MINUS
)
5459 binoptab
= sub_optab
;
5460 else if (GET_CODE (value
) == MULT
)
5462 op2
= XEXP (value
, 1);
5463 if (!CONSTANT_P (op2
)
5464 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5466 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5467 return expand_mult (GET_MODE (value
), tmp
,
5468 force_operand (op2
, NULL_RTX
),
5474 op2
= XEXP (value
, 1);
5475 if (!CONSTANT_P (op2
)
5476 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5478 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
5480 binoptab
= add_optab
;
5481 op2
= negate_rtx (GET_MODE (value
), op2
);
5484 /* Check for an addition with OP2 a constant integer and our first
5485 operand a PLUS of a virtual register and something else. In that
5486 case, we want to emit the sum of the virtual register and the
5487 constant first and then add the other value. This allows virtual
5488 register instantiation to simply modify the constant rather than
5489 creating another one around this addition. */
5490 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
5491 && GET_CODE (XEXP (value
, 0)) == PLUS
5492 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5493 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5494 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5496 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
5497 XEXP (XEXP (value
, 0), 0), op2
,
5498 subtarget
, 0, OPTAB_LIB_WIDEN
);
5499 return expand_binop (GET_MODE (value
), binoptab
, temp
,
5500 force_operand (XEXP (XEXP (value
, 0), 1), 0),
5501 target
, 0, OPTAB_LIB_WIDEN
);
5504 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5505 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
5506 force_operand (op2
, NULL_RTX
),
5507 target
, 0, OPTAB_LIB_WIDEN
);
5508 /* We give UNSIGNEDP = 0 to expand_binop
5509 because the only operations we are expanding here are signed ones. */
5512 #ifdef INSN_SCHEDULING
5513 /* On machines that have insn scheduling, we want all memory references to be
5514 explicit, so we need to deal with such paradoxical SUBREGs. */
5515 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5516 && (GET_MODE_SIZE (GET_MODE (value
))
5517 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5519 = simplify_gen_subreg (GET_MODE (value
),
5520 force_reg (GET_MODE (SUBREG_REG (value
)),
5521 force_operand (SUBREG_REG (value
),
5523 GET_MODE (SUBREG_REG (value
)),
5524 SUBREG_BYTE (value
));
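
/* Illustrative aside (not compiler code): force_operand is the usual
   "reduce an arithmetic tree to a single operand by materializing its
   subexpressions first" recursion.  A toy version over a hypothetical node
   type, evaluating into plain ints instead of pseudo registers:  */
#if 0
struct node
{
  char op;			/* '+', '-', '*', or 0 for a leaf */
  int leaf;
  struct node *l, *r;
};

static int
force_value (const struct node *n)
{
  int a, b;

  if (n->op == 0)
    return n->leaf;		/* already a "register or constant" */

  a = force_value (n->l);	/* force each operand first */
  b = force_value (n->r);
  return n->op == '+' ? a + b : n->op == '-' ? a - b : a * b;
}
#endif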
5530 /* Subroutine of expand_expr: return nonzero iff there is no way that
5531 EXP can reference X, which is being modified. TOP_P is nonzero if this
5532 call is going to be used to determine whether we need a temporary
5533 for EXP, as opposed to a recursive call to this function.
5535 It is always safe for this routine to return zero since it merely
5536 searches for optimization opportunities. */
5539 safe_from_p (x
, exp
, top_p
)
5546 static tree save_expr_list
;
5549 /* If EXP has varying size, we MUST use a target since we currently
5550 have no way of allocating temporaries of variable size
5551 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5552 So we assume here that something at a higher level has prevented a
5553 clash. This is somewhat bogus, but the best we can do. Only
5554 do this when X is BLKmode and when we are at the top level. */
5555 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5556 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5557 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5558 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5559 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5561 && GET_MODE (x
) == BLKmode
)
5562 /* If X is in the outgoing argument area, it is always safe. */
5563 || (GET_CODE (x
) == MEM
5564 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5565 || (GET_CODE (XEXP (x
, 0)) == PLUS
5566 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5569 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5570 find the underlying pseudo. */
5571 if (GET_CODE (x
) == SUBREG
)
5574 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5578 /* A SAVE_EXPR might appear many times in the expression passed to the
5579 top-level safe_from_p call, and if it has a complex subexpression,
5580 examining it multiple times could result in a combinatorial explosion.
5581 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5582 with optimization took about 28 minutes to compile -- even though it was
5583 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5584 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5585 we have processed. Note that the only test of top_p was above. */
5594 rtn
= safe_from_p (x
, exp
, 0);
5596 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5597 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5602 /* Now look at our tree code and possibly recurse. */
5603 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5606 exp_rtl
= DECL_RTL_IF_SET (exp
);
5613 if (TREE_CODE (exp
) == TREE_LIST
)
5614 return ((TREE_VALUE (exp
) == 0
5615 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5616 && (TREE_CHAIN (exp
) == 0
5617 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5618 else if (TREE_CODE (exp
) == ERROR_MARK
)
5619 return 1; /* An already-visited SAVE_EXPR? */
5624 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5628 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5629 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5633 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5634 the expression. If it is set, we conflict iff we are that rtx or
5635 both are in memory. Otherwise, we check all operands of the
5636 expression recursively. */
5638 switch (TREE_CODE (exp
))
5641 /* If the operand is static or we are static, we can't conflict.
5642 Likewise if we don't conflict with the operand at all. */
5643 if (staticp (TREE_OPERAND (exp
, 0))
5644 || TREE_STATIC (exp
)
5645 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5648       /* Otherwise, the only way this can conflict is if we are taking
5649 	 the address of a DECL and that address is part of X, which is
	 very rare.  */
5651       exp = TREE_OPERAND (exp, 0);
5654 if (!DECL_RTL_SET_P (exp
)
5655 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5658 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5663 if (GET_CODE (x
) == MEM
5664 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5665 get_alias_set (exp
)))
5670 /* Assume that the call will clobber all hard registers and
5672 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5673 || GET_CODE (x
) == MEM
)
5678       /* If a sequence exists, we would have to scan every instruction
5679 	 in the sequence to see if it was safe.  This is probably not
	 worthwhile.  */
5681       if (RTL_EXPR_SEQUENCE (exp))
5684 exp_rtl
= RTL_EXPR_RTL (exp
);
5687 case WITH_CLEANUP_EXPR
:
5688 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
5691 case CLEANUP_POINT_EXPR
:
5692 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5695 exp_rtl
= SAVE_EXPR_RTL (exp
);
5699 /* If we've already scanned this, don't do it again. Otherwise,
5700 show we've scanned it and record for clearing the flag if we're
5702 if (TREE_PRIVATE (exp
))
5705 TREE_PRIVATE (exp
) = 1;
5706 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5708 TREE_PRIVATE (exp
) = 0;
5712 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5716 /* The only operand we look at is operand 1. The rest aren't
5717 part of the expression. */
5718 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5720 case METHOD_CALL_EXPR
:
5721 /* This takes an rtx argument, but shouldn't appear here. */
5728 /* If we have an rtx, we do not need to scan our operands. */
5732 nops
= first_rtl_op (TREE_CODE (exp
));
5733 for (i
= 0; i
< nops
; i
++)
5734 if (TREE_OPERAND (exp
, i
) != 0
5735 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5738 /* If this is a language-specific tree code, it may require
5739 special handling. */
5740 if ((unsigned int) TREE_CODE (exp
)
5741 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5742 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
5746 /* If we have an rtl, find any enclosed object. Then see if we conflict
5750 if (GET_CODE (exp_rtl
) == SUBREG
)
5752 exp_rtl
= SUBREG_REG (exp_rtl
);
5753 if (GET_CODE (exp_rtl
) == REG
5754 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5758 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5759 are memory and they conflict. */
5760 return ! (rtx_equal_p (x
, exp_rtl
)
5761 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5762 && true_dependence (exp_rtl
, VOIDmode
, x
,
5763 rtx_addr_varies_p
)));
5766 /* If we reach here, it is safe. */
5770 /* Subroutine of expand_expr: return rtx if EXP is a
5771 variable or parameter; else return 0. */
5778 switch (TREE_CODE (exp
))
5782 return DECL_RTL (exp
);
5788 #ifdef MAX_INTEGER_COMPUTATION_MODE
5791 check_max_integer_computation_mode (exp
)
5794 enum tree_code code
;
5795 enum machine_mode mode
;
5797 /* Strip any NOPs that don't change the mode. */
5799 code
= TREE_CODE (exp
);
5801 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5802 if (code
== NOP_EXPR
5803 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5806 /* First check the type of the overall operation. We need only look at
5807 unary, binary and relational operations. */
5808 if (TREE_CODE_CLASS (code
) == '1'
5809 || TREE_CODE_CLASS (code
) == '2'
5810 || TREE_CODE_CLASS (code
) == '<')
5812 mode
= TYPE_MODE (TREE_TYPE (exp
));
5813 if (GET_MODE_CLASS (mode
) == MODE_INT
5814 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5815 internal_error ("unsupported wide integer operation");
5818 /* Check operand of a unary op. */
5819 if (TREE_CODE_CLASS (code
) == '1')
5821 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5822 if (GET_MODE_CLASS (mode
) == MODE_INT
5823 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5824 internal_error ("unsupported wide integer operation");
5827 /* Check operands of a binary/comparison op. */
5828 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
5830 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5831 if (GET_MODE_CLASS (mode
) == MODE_INT
5832 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5833 internal_error ("unsupported wide integer operation");
5835 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
5836 if (GET_MODE_CLASS (mode
) == MODE_INT
5837 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5838 internal_error ("unsupported wide integer operation");
5843 /* Return the highest power of two that EXP is known to be a multiple of.
5844 This is used in updating alignment of MEMs in array references. */
5846 static HOST_WIDE_INT
5847 highest_pow2_factor (exp
)
5850 HOST_WIDE_INT c0
, c1
;
5852 switch (TREE_CODE (exp
))
5855 /* We can find the lowest bit that's a one. If the low
5856 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5857 We need to handle this case since we can find it in a COND_EXPR,
5858 	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
5859 	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
5861       if (TREE_CONSTANT_OVERFLOW (exp))
5862 	return BIGGEST_ALIGNMENT;
5865 	  /* Note: tree_low_cst is intentionally not used here,
5866 	     we don't care about the upper bits.  */
5867 	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
5869 	  return c0 ? c0 : BIGGEST_ALIGNMENT;
5873     case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
5874       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5875       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5876       return MIN (c0, c1);

    case MULT_EXPR:
5879       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5880       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

5883     case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
5885       if (integer_pow2p (TREE_OPERAND (exp, 1))
5886 	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
5888 	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5889 	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5890 	  return MAX (1, c0 / c1);
	}
      break;

5894     case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
5895     case SAVE_EXPR: case WITH_RECORD_EXPR:
5896       return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
5899       return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
5902       c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5903       c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5904       return MIN (c0, c1);
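
/* Illustrative aside (not compiler code): for an integer constant the
   largest power-of-two factor is its lowest set bit, which is what the
   INTEGER_CST case above isolates with c0 &= -c0.  Standalone sketch:  */
#if 0
static unsigned long
lowest_set_bit (unsigned long c)
{
  return c & -c;	/* 0 yields 0; the case above special-cases that */
}
#endif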
5913 /* Return an object on the placeholder list that matches EXP, a
5914 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5915 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5916 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5917 is a location which initially points to a starting location in the
5918 placeholder list (zero means start of the list) and where a pointer into
5919 the placeholder list at which the object is found is placed. */
5922 find_placeholder (exp
, plist
)
5926 tree type
= TREE_TYPE (exp
);
5927 tree placeholder_expr
;
5929 for (placeholder_expr
5930 = plist
&& *plist
? TREE_CHAIN (*plist
) : placeholder_list
;
5931 placeholder_expr
!= 0;
5932 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
5934 tree need_type
= TYPE_MAIN_VARIANT (type
);
5937 /* Find the outermost reference that is of the type we want. If none,
5938 see if any object has a type that is a pointer to the type we
5940 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
5941 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
5942 || TREE_CODE (elt
) == COND_EXPR
)
5943 ? TREE_OPERAND (elt
, 1)
5944 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
5945 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
5946 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
5947 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
5948 ? TREE_OPERAND (elt
, 0) : 0))
5949 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
5952 *plist
= placeholder_expr
;
5956 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
5958 = ((TREE_CODE (elt
) == COMPOUND_EXPR
5959 || TREE_CODE (elt
) == COND_EXPR
)
5960 ? TREE_OPERAND (elt
, 1)
5961 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
5962 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
5963 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
5964 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
5965 ? TREE_OPERAND (elt
, 0) : 0))
5966 if (POINTER_TYPE_P (TREE_TYPE (elt
))
5967 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
5971 *plist
= placeholder_expr
;
5972 return build1 (INDIRECT_REF
, need_type
, elt
);
5979 /* expand_expr: generate code for computing expression EXP.
5980 An rtx for the computed value is returned. The value is never null.
5981 In the case of a void EXP, const0_rtx is returned.
5983 The value may be stored in TARGET if TARGET is nonzero.
5984 TARGET is just a suggestion; callers must assume that
5985 the rtx returned may not be the same as TARGET.
5987 If TARGET is CONST0_RTX, it means that the value will be ignored.
5989 If TMODE is not VOIDmode, it suggests generating the
5990 result in mode TMODE. But this is done only when convenient.
5991 Otherwise, TMODE is ignored and the value generated in its natural mode.
5992 TMODE is just a suggestion; callers must assume that
5993 the rtx returned may not have mode TMODE.
5995 Note that TARGET may have neither TMODE nor MODE. In that case, it
5996 probably will not be used.
5998 If MODIFIER is EXPAND_SUM then when EXP is an addition
5999 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6000 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6001 products as above, or REG or MEM, or constant.
6002 Ordinarily in such cases we would output mul or add instructions
6003 and then return a pseudo reg containing the sum.
6005 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6006 it also marks a label as absolutely required (it can't be dead).
6007 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6008 This is used for outputting expressions used in initializers.
6010 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6011 with a constant address even if that address is not normally legitimate.
6012 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
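
/* Illustrative aside (hypothetical caller, not from this file): a typical
   use expands an operand with no particular target or mode preference
   (modifier 0, the normal case), while address arithmetic may pass
   EXPAND_SUM to get a sum/product form back.  op0, op1 and subtarget are
   assumed names.  */
#if 0
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode, EXPAND_SUM);
#endif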
6015 expand_expr (exp
, target
, tmode
, modifier
)
6018 enum machine_mode tmode
;
6019 enum expand_modifier modifier
;
6022 tree type
= TREE_TYPE (exp
);
6023 int unsignedp
= TREE_UNSIGNED (type
);
6024 enum machine_mode mode
;
6025 enum tree_code code
= TREE_CODE (exp
);
6027 rtx subtarget
, original_target
;
6031 /* Handle ERROR_MARK before anybody tries to access its type. */
6032 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
6034 op0
= CONST0_RTX (tmode
);
6040 mode
= TYPE_MODE (type
);
6041 /* Use subtarget as the target for operand 0 of a binary operation. */
6042 subtarget
= get_subtarget (target
);
6043 original_target
= target
;
6044 ignore
= (target
== const0_rtx
6045 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
6046 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
6047 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
6048 && TREE_CODE (type
) == VOID_TYPE
));
6050 /* If we are going to ignore this result, we need only do something
6051 if there is a side-effect somewhere in the expression. If there
6052 is, short-circuit the most common cases here. Note that we must
6053 not call expand_expr with anything but const0_rtx in case this
6054 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6058 if (! TREE_SIDE_EFFECTS (exp
))
6061 /* Ensure we reference a volatile object even if value is ignored, but
6062 don't do this if all we are doing is taking its address. */
6063 if (TREE_THIS_VOLATILE (exp
)
6064 && TREE_CODE (exp
) != FUNCTION_DECL
6065 && mode
!= VOIDmode
&& mode
!= BLKmode
6066 && modifier
!= EXPAND_CONST_ADDRESS
)
6068 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
6069 if (GET_CODE (temp
) == MEM
)
6070 temp
= copy_to_reg (temp
);
6074 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6075 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6076 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6079 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6080 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6082 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6083 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6086 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6087 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
6088 /* If the second operand has no side effects, just evaluate
6090 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6092 else if (code
== BIT_FIELD_REF
)
6094 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6095 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6096 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
6103 #ifdef MAX_INTEGER_COMPUTATION_MODE
6104 /* Only check stuff here if the mode we want is different from the mode
6105 of the expression; if it's the same, check_max_integer_computation_mode
6106 will handle it. Do we really need to check this stuff at all? */
6109 && GET_MODE (target
) != mode
6110 && TREE_CODE (exp
) != INTEGER_CST
6111 && TREE_CODE (exp
) != PARM_DECL
6112 && TREE_CODE (exp
) != ARRAY_REF
6113 && TREE_CODE (exp
) != ARRAY_RANGE_REF
6114 && TREE_CODE (exp
) != COMPONENT_REF
6115 && TREE_CODE (exp
) != BIT_FIELD_REF
6116 && TREE_CODE (exp
) != INDIRECT_REF
6117 && TREE_CODE (exp
) != CALL_EXPR
6118 && TREE_CODE (exp
) != VAR_DECL
6119 && TREE_CODE (exp
) != RTL_EXPR
)
6121 enum machine_mode mode
= GET_MODE (target
);
6123 if (GET_MODE_CLASS (mode
) == MODE_INT
6124 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6125 internal_error ("unsupported wide integer operation");
6129 && TREE_CODE (exp
) != INTEGER_CST
6130 && TREE_CODE (exp
) != PARM_DECL
6131 && TREE_CODE (exp
) != ARRAY_REF
6132 && TREE_CODE (exp
) != ARRAY_RANGE_REF
6133 && TREE_CODE (exp
) != COMPONENT_REF
6134 && TREE_CODE (exp
) != BIT_FIELD_REF
6135 && TREE_CODE (exp
) != INDIRECT_REF
6136 && TREE_CODE (exp
) != VAR_DECL
6137 && TREE_CODE (exp
) != CALL_EXPR
6138 && TREE_CODE (exp
) != RTL_EXPR
6139 && GET_MODE_CLASS (tmode
) == MODE_INT
6140 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
6141 internal_error ("unsupported wide integer operation");
6143 check_max_integer_computation_mode (exp
);
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
    target = subtarget;
      tree function = decl_function_context (exp);
      /* Handle using a label in a containing function.  */
      if (function != current_function_decl
	  && function != inline_function_decl && function != 0)
	{
	  struct function *p = find_function_data (function);
	  p->expr->x_forced_labels
	    = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
				 p->expr->x_forced_labels);
	}
      if (modifier == EXPAND_INITIALIZER)
	forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
					   label_rtx (exp), forced_labels);

      temp = gen_rtx_MEM (FUNCTION_MODE,
			  gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
      if (function != current_function_decl
	  && function != inline_function_decl && function != 0)
	LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;

      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  rtx value = DECL_RTL_IF_SET (exp);

	  layout_decl (exp, 0);

	  /* If the RTL was already set, update its mode and memory
	     attributes.  */
	  PUT_MODE (value, DECL_MODE (exp));
	  SET_DECL_RTL (exp, 0);
	  set_mem_attributes (value, exp, 1);
	  SET_DECL_RTL (exp, value);
	}

      /* ... fall through ...  */

      if (DECL_RTL (exp) == 0)

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  (*lang_hooks.mark_addressable) (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 */
      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = replace_equiv_address (DECL_RTL (exp),
				      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	}

      return DECL_RTL (exp);
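
      /* Constant operands come next: immed_double_const builds the
	 immediate CONST_INT or CONST_DOUBLE for an integer constant, the
	 DECL_INITIAL expansion presumably handles constant decls, and
	 immed_real_const builds the immediate for a floating constant.  */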
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);
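
      /* The remaining constant kinds are emitted into the constant pool
	 (output_constant_def) and are then accessed through the MEM that
	 TREE_CST_RTL records for them.  */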
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return replace_equiv_address (TREE_CST_RTL (exp),
				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
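
    /* An EXPR_WITH_FILE_LOCATION wraps another expression together with a
       source position; the wrapped node is expanded with input_filename
       and lineno temporarily switched to that position.  */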
    case EXPR_WITH_FILE_LOCATION:
      {
	const char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
      }

      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)

      /* If this is non-local, handle it.  */
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    replace_equiv_address (temp,
				   fix_lexical_addr (XEXP (temp, 0), exp));
	}

      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = assign_temp (build_qualified_type (type,
						      | TYPE_QUAL_CONST)),

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	}

      return SAVE_EXPR_RTL (exp);

      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0)
	= (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
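
    /* A PLACEHOLDER_EXPR stands for the object supplied by an enclosing
       WITH_RECORD_EXPR; find_placeholder locates it on placeholder_list
       and the object found there is expanded in its place.  */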
    case PLACEHOLDER_EXPR:
      {
	tree old_list = placeholder_list;
	tree placeholder_expr = 0;

	exp = find_placeholder (exp, &placeholder_expr);

	placeholder_list = TREE_CHAIN (placeholder_expr);
	temp = expand_expr (exp, original_target, tmode, modifier);
	placeholder_list = old_list;
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
			    modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
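
      /* Jumps: a goto to a known LABEL_DECL expands directly, anything
	 else is treated as a computed goto through an address.  */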
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));

      expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));

      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);

      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	if (!DECL_RTL_SET_P (vars))
	  vars_need_expansion = 1;
	expand_decl_init (vars);
	vars = TREE_CHAIN (vars);

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
      }

      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);
	}

      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (target == 0 || ! safe_from_p (target, exp, 1)
	  || GET_CODE (target) == PARALLEL)
	target
	  = assign_temp (build_qualified_type (type,
					       | (TREE_READONLY (exp)
						  * TYPE_QUAL_CONST))),
			 0, TREE_ADDRESSABLE (exp), 1);

      store_constructor (exp, target, 0,
			 int_size_in_bytes (TREE_TYPE (exp)));
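
      /* A dereference becomes a MEM at the operand's address; as a
	 special case, reading a single character out of a constant
	 string is folded to the character value itself.  */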
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_WRITE)
	  return gen_int_mode (TREE_STRING_POINTER (string)
			       [TREE_INT_CST_LOW (index)], mode);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;
      }

      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return gen_int_mode (TREE_STRING_POINTER (array)
			       [TREE_INT_CST_LOW (index)], mode);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		   i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				modifier);
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
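
    /* Component-like references (COMPONENT_REF, BIT_FIELD_REF,
       ARRAY_RANGE_REF and the general array case) share the
       field-extraction code below.  */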
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }
	      }
	}
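
      /* The general case: get_inner_reference decomposes the reference
	 into a base object plus bit position, size and (possibly variable)
	 byte offset; the piece is then fetched either as an ordinary
	 memory reference or with extract_bit_field.  */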
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

	    /* If this object is in a register, put it into memory.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		/* If the operand is a SAVE_EXPR, we can deal with this by
		   forcing the SAVE_EXPR into memory.  */
		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		  {
		    put_var_into_stack (TREE_OPERAND (exp, 0));
		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		  }
		else
		  {
		    tree nt
		      = build_qualified_type (TREE_TYPE (tem),
					      (TYPE_QUALS (TREE_TYPE (tem))
					       | TYPE_QUAL_CONST));
		    rtx memloc = assign_temp (nt, 1, 1, 1);

		    emit_move_insn (memloc, op0);
		  }
	      }

	    if (GET_CODE (op0) != MEM)

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    /* A constant address in OP0 can have VOIDmode, we must not try
	       to call force_reg for that case.  Avoid that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one element arrays having the same mode as its element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
		&& ((TYPE_ALIGN (TREE_TYPE (tem))
		     < GET_MODE_ALIGNMENT (mode))
		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && GET_CODE (op0) == MEM
		      && GET_CODE (target) == MEM
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);

		target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))

	    if (mode == BLKmode)
	      {
		rtx new = assign_temp (build_qualified_type
				       ((*lang_hooks.types.type_for_mode)
					TYPE_QUAL_CONST), 0, 1, 1);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
      }
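
      /* Expand the containing expression, then record which vtable entry
	 was fetched by hanging a REG_VTABLE_REF note on the instruction
	 that produced the value.  */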
      {
	rtx insn, before = get_last_insn (), vtbl_ref;

	/* Evaluate the interior expression.  */
	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,

	/* Get or create an instruction off which to hang a note.  */
	if (REG_P (subtarget))
	  {
	    insn = get_last_insn ();
	    if (! INSN_P (insn))
	      insn = prev_nonnote_insn (insn);
	  }
	else
	  {
	    target = gen_reg_rtx (GET_MODE (subtarget));
	    insn = emit_move_insn (target, subtarget);
	  }

	/* Collect the data for the note.  */
	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
	vtbl_ref = plus_constant (vtbl_ref,
				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
	/* Discard the initial CONST that was added.  */
	vtbl_ref = XEXP (vtbl_ref, 0);

	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */

      /* Pascal set IN expression.

	 rlo       = set_low - (set_low%bits_per_word);
	 the_word  = set [ (index - rlo)/bits_per_word ];
	 bit_index = index % bits_per_word;
	 bitmask   = 1 << bit_index;
	 return !!(the_word & bitmask);  */

      {
	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx diff, quo, rem, addr, bit, result;

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))

	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),

	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_move_insn (target, const0_rtx);
      }
    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
				  CLEANUP_EH_ONLY (exp));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	/* If we're going to use this value, load it up now.  */
	op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }

      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return (*lang_hooks.expand_expr)
	      (exp, original_target, tmode, modifier);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
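
    /* Conversions: mostly these just expand the operand and extend,
       truncate or reinterpret it; conversion to a union type instead
       stores the operand into a fresh temporary of the union type.  */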
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	    }

	  target = assign_temp (type, 0, 1, 1);

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, type, 0);

	  /* Return the entire union.  */
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TREE_UNSIGNED (inner_type));
	}

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      convert_to_mode (mode, op0,
		       TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      convert_move (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are within a word, we
	 can use gen_lowpart.  If neither is true, make sure the operand is
	 in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (TREE_ADDRESSABLE (exp))

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (GET_CODE (op0) == MEM)
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      if (TREE_ADDRESSABLE (exp))

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
	      else
		emit_move_insn (new_with_op0_mode, op0);
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,

	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		}

	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))

      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);

	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);

      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}

      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    /* If we can't negate the constant in TYPE, leave it alone and
	       expand_binop will negate it for us.  We used to try to do it
	       here in the signed version of TYPE, but that doesn't work
	       on POINTER_TYPEs.  */;
	  else
	    exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	}

      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS(mode) == MODE_INT)
		   ? subv_optab : sub_optab;
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,

	  /* If we knew for certain that this is arithmetic for an array
	     reference, and we knew the bounds of the array, then we could
	     apply the distributive law across (PLUS X C) for constant C.
	     Without such knowledge, we risk overflowing the computation
	     when both X and C are large, but X+C isn't.  */
	  /* ??? Could perhaps special-case EXP being unsigned and C being
	     positive.  In that case we are certain that X+C is no smaller
	     than X and so the transformed expression will overflow iff the
	     original would have.  */

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      gen_highpart (innermode, temp),
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		}
	    }
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))

      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
	 expensive divide.  If not, combine will rebuild the original
	 computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, unsignedp);
      this_optab = sdiv_optab;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

      target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

      target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS(mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,

      /* At this point, a MEM target is no longer useful; we will get better
	 code.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7932 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
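      /* Illustration (a hypothetical example, not taken from this file):
	 with no short-circuit requirement, `f () && g ()' written as a
	 TRUTH_AND_EXPR evaluates both calls to 0/1 values and combines them,
	 roughly

	     t1 = (f () != 0);  t2 = (g () != 0);  result = t1 & t2;

	 whereas TRUTH_ANDIF_EXPR would branch around the call to g () when
	 f () is already zero.  */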
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
7969 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7971 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7972 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
7975 /* Could determine the answer when only additive constants differ. Also,
7976 the addition of one can be handled by changing the condition. */
7983 case UNORDERED_EXPR
:
7990 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
7994 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7995 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
7997 && GET_CODE (original_target
) == REG
7998 && (GET_MODE (original_target
)
7999 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8001 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8004 /* If temp is constant, we can just compute the result. */
8005 if (GET_CODE (temp
) == CONST_INT
)
8007 if (INTVAL (temp
) != 0)
8008 emit_move_insn (target
, const1_rtx
);
8010 emit_move_insn (target
, const0_rtx
);
8015 if (temp
!= original_target
)
8017 enum machine_mode mode1
= GET_MODE (temp
);
8018 if (mode1
== VOIDmode
)
8019 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
8021 temp
= copy_to_mode_reg (mode1
, temp
);
8024 op1
= gen_label_rtx ();
8025 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8026 GET_MODE (temp
), unsignedp
, op1
);
8027 emit_move_insn (temp
, const1_rtx
);
8032 /* If no set-flag instruction, must generate a conditional
8033 store into a temporary variable. Drop through
8034 and handle this like && and ||. */
8036 case TRUTH_ANDIF_EXPR
:
8037 case TRUTH_ORIF_EXPR
:
8039 && (target
== 0 || ! safe_from_p (target
, exp
, 1)
8040 /* Make sure we don't have a hard reg (such as function's return
8041 value) live across basic blocks, if not optimizing. */
8042 || (!optimize
&& GET_CODE (target
) == REG
8043 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8044 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8047 emit_clr_insn (target
);
8049 op1
= gen_label_rtx ();
8050 jumpifnot (exp
, op1
);
8053 emit_0_to_1_insn (target
);
8056 return ignore
? const0_rtx
: target
;
8058 case TRUTH_NOT_EXPR
:
8059 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8060 /* The parser is careful to generate TRUTH_NOT_EXPR
8061 only with operands that are always zero or one. */
8062 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8063 target
, 1, OPTAB_LIB_WIDEN
);
8069 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8071 return expand_expr (TREE_OPERAND (exp
, 1),
8072 (ignore
? const0_rtx
: target
),
8076 /* If we would have a "singleton" (see below) were it not for a
8077 conversion in each arm, bring that conversion back out. */
8078 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8079 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8080 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8081 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8083 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8084 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8086 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8087 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8088 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8089 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8090 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8091 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8092 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8093 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8094 return expand_expr (build1 (NOP_EXPR
, type
,
8095 build (COND_EXPR
, TREE_TYPE (iftrue
),
8096 TREE_OPERAND (exp
, 0),
8098 target
, tmode
, modifier
);
      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      /* If an arm of the branch requires a cleanup,
	 only that cleanup is performed.  */
8111 tree binary_op
= 0, unary_op
= 0;
	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8121 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8126 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8127 if (GET_MODE (op0
) == mode
)
8131 target
= gen_reg_rtx (mode
);
8132 convert_move (target
, op0
, unsignedp
);
	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */
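	/* Illustration (hypothetical source, not from this file): for

	       y = x ? a + b : a;

	   the code below copies A into the result, tests X, and
	   conditionally adds B, instead of materializing both arms and
	   selecting between them.  */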
8142 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8143 && operand_equal_p (TREE_OPERAND (exp
, 2),
8144 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8145 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8146 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8147 && operand_equal_p (TREE_OPERAND (exp
, 1),
8148 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8149 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8150 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8151 && operand_equal_p (TREE_OPERAND (exp
, 2),
8152 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8153 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8154 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8155 && operand_equal_p (TREE_OPERAND (exp
, 1),
8156 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8157 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8159 /* If we are not to produce a result, we have no target. Otherwise,
8160 if a target was specified use it; it will not be used as an
8161 intermediate target unless it is safe. If no target, use a
8166 else if (original_target
8167 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8168 || (singleton
&& GET_CODE (original_target
) == REG
8169 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8170 && original_target
== var_rtx (singleton
)))
8171 && GET_MODE (original_target
) == mode
8172 #ifdef HAVE_conditional_move
8173 && (! can_conditionally_move_p (mode
)
8174 || GET_CODE (original_target
) == REG
8175 || TREE_ADDRESSABLE (type
))
8177 && (GET_CODE (original_target
) != MEM
8178 || TREE_ADDRESSABLE (type
)))
8179 temp
= original_target
;
8180 else if (TREE_ADDRESSABLE (type
))
8183 temp
= assign_temp (type
, 0, 0, 1);
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
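	/* Worked example (hypothetical values, for illustration only):
	   with C == 4,

	       x ? a + 4 : a    becomes    a + ((x != 0) << 2)

	   since log2 (4) == 2; the store-flag result (x != 0) is 0 or 1,
	   so the shift yields either 0 or 4, which is added to A without
	   any branch.  */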
8189 if (temp
&& singleton
&& binary_op
8190 && (TREE_CODE (binary_op
) == PLUS_EXPR
8191 || TREE_CODE (binary_op
) == MINUS_EXPR
8192 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8193 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8194 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8195 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8196 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8199 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8200 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8201 ? addv_optab
: add_optab
)
8202 : TREE_CODE (binary_op
) == MINUS_EXPR
8203 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8204 ? subv_optab
: sub_optab
)
8205 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8208 /* If we had X ? A : A + 1, do this as A + (X == 0).
8210 We have to invert the truth value here and then put it
8211 back later if do_store_flag fails. We cannot simply copy
8212 TREE_OPERAND (exp, 0) to another variable and modify that
8213 because invert_truthvalue can modify the tree pointed to
8215 if (singleton
== TREE_OPERAND (exp
, 1))
8216 TREE_OPERAND (exp
, 0)
8217 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8219 result
= do_store_flag (TREE_OPERAND (exp
, 0),
8220 (safe_from_p (temp
, singleton
, 1)
8222 mode
, BRANCH_COST
<= 1);
8224 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8225 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8226 build_int_2 (tree_log2
8230 (safe_from_p (temp
, singleton
, 1)
8231 ? temp
: NULL_RTX
), 0);
8235 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8236 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8237 unsignedp
, OPTAB_LIB_WIDEN
);
8239 else if (singleton
== TREE_OPERAND (exp
, 1))
8240 TREE_OPERAND (exp
, 0)
8241 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8244 do_pending_stack_adjust ();
8246 op0
= gen_label_rtx ();
8248 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8252 /* If the target conflicts with the other operand of the
8253 binary op, we can't use it. Also, we can't use the target
8254 if it is a hard register, because evaluating the condition
8255 might clobber it. */
8257 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8258 || (GET_CODE (temp
) == REG
8259 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8260 temp
= gen_reg_rtx (mode
);
8261 store_expr (singleton
, temp
, 0);
8264 expand_expr (singleton
,
8265 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8266 if (singleton
== TREE_OPERAND (exp
, 1))
8267 jumpif (TREE_OPERAND (exp
, 0), op0
);
8269 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8271 start_cleanup_deferral ();
8272 if (binary_op
&& temp
== 0)
8273 /* Just touch the other operand. */
8274 expand_expr (TREE_OPERAND (binary_op
, 1),
8275 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8277 store_expr (build (TREE_CODE (binary_op
), type
,
8278 make_tree (type
, temp
),
8279 TREE_OPERAND (binary_op
, 1)),
8282 store_expr (build1 (TREE_CODE (unary_op
), type
,
8283 make_tree (type
, temp
)),
8287 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8288 comparison operator. If we have one of these cases, set the
8289 output to A, branch on A (cse will merge these two references),
8290 then set the output to FOO. */
8292 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8293 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8294 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8295 TREE_OPERAND (exp
, 1), 0)
8296 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8297 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8298 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8300 if (GET_CODE (temp
) == REG
8301 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8302 temp
= gen_reg_rtx (mode
);
8303 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8304 jumpif (TREE_OPERAND (exp
, 0), op0
);
8306 start_cleanup_deferral ();
8307 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8311 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8312 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8313 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8314 TREE_OPERAND (exp
, 2), 0)
8315 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8316 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8317 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8319 if (GET_CODE (temp
) == REG
8320 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8321 temp
= gen_reg_rtx (mode
);
8322 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8323 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8325 start_cleanup_deferral ();
8326 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8331 op1
= gen_label_rtx ();
8332 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8334 start_cleanup_deferral ();
8336 /* One branch of the cond can be void, if it never returns. For
8337 example A ? throw : E */
8339 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8340 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8342 expand_expr (TREE_OPERAND (exp
, 1),
8343 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8344 end_cleanup_deferral ();
8346 emit_jump_insn (gen_jump (op1
));
8349 start_cleanup_deferral ();
8351 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8352 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8354 expand_expr (TREE_OPERAND (exp
, 2),
8355 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8358 end_cleanup_deferral ();
8369 /* Something needs to be initialized, but we didn't know
8370 where that thing was when building the tree. For example,
8371 it could be the return value of a function, or a parameter
8372 to a function which lays down in the stack, or a temporary
8373 variable which must be passed by reference.
8375 We guarantee that the expression will either be constructed
8376 or copied into our original target. */
8378 tree slot
= TREE_OPERAND (exp
, 0);
8379 tree cleanups
= NULL_TREE
;
8382 if (TREE_CODE (slot
) != VAR_DECL
)
8386 target
= original_target
;
8388 /* Set this here so that if we get a target that refers to a
8389 register variable that's already been used, put_reg_into_stack
8390 knows that it should fix up those uses. */
8391 TREE_USED (slot
) = 1;
8395 if (DECL_RTL_SET_P (slot
))
8397 target
= DECL_RTL (slot
);
8398 /* If we have already expanded the slot, so don't do
8400 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8405 target
= assign_temp (type
, 2, 0, 1);
8406 /* All temp slots at this level must not conflict. */
8407 preserve_temp_slots (target
);
8408 SET_DECL_RTL (slot
, target
);
8409 if (TREE_ADDRESSABLE (slot
))
8410 put_var_into_stack (slot
);
8412 /* Since SLOT is not known to the called function
8413 to belong to its stack frame, we must build an explicit
8414 cleanup. This case occurs when we must build up a reference
8415 to pass the reference as an argument. In this case,
8416 it is very likely that such a reference need not be
8419 if (TREE_OPERAND (exp
, 2) == 0)
8420 TREE_OPERAND (exp
, 2)
8421 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
8422 cleanups
= TREE_OPERAND (exp
, 2);
8427 /* This case does occur, when expanding a parameter which
8428 needs to be constructed on the stack. The target
8429 is the actual stack address that we want to initialize.
8430 The function we call will perform the cleanup in this case. */
8432 /* If we have already assigned it space, use that space,
8433 not target that we were passed in, as our target
8434 parameter is only a hint. */
8435 if (DECL_RTL_SET_P (slot
))
8437 target
= DECL_RTL (slot
);
8438 /* If we have already expanded the slot, so don't do
8440 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8445 SET_DECL_RTL (slot
, target
);
8446 /* If we must have an addressable slot, then make sure that
8447 the RTL that we just stored in slot is OK. */
8448 if (TREE_ADDRESSABLE (slot
))
8449 put_var_into_stack (slot
);
8453 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8454 /* Mark it as expanded. */
8455 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8457 store_expr (exp1
, target
, 0);
8459 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8466 tree lhs
= TREE_OPERAND (exp
, 0);
8467 tree rhs
= TREE_OPERAND (exp
, 1);
8469 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8475 /* If lhs is complex, expand calls in rhs before computing it.
8476 That's so we don't compute a pointer and save it over a
8477 call. If lhs is simple, compute it first so we can give it
8478 as a target if the rhs is just a call. This avoids an
8479 extra temp and copy and that prevents a partial-subsumption
8480 which makes bad code. Actually we could treat
8481 component_ref's of vars like vars. */
8483 tree lhs
= TREE_OPERAND (exp
, 0);
8484 tree rhs
= TREE_OPERAND (exp
, 1);
8488 /* Check for |= or &= of a bitfield of size one into another bitfield
8489 of size 1. In this case, (unless we need the result of the
8490 assignment) we can do this more efficiently with a
8491 test followed by an assignment, if necessary.
8493 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8494 things change so we do, this code should be enhanced to
8497 && TREE_CODE (lhs
) == COMPONENT_REF
8498 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8499 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8500 && TREE_OPERAND (rhs
, 0) == lhs
8501 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8502 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8503 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8505 rtx label
= gen_label_rtx ();
8507 do_jump (TREE_OPERAND (rhs
, 1),
8508 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8509 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8510 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8511 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8513 : integer_zero_node
)),
8515 do_pending_stack_adjust ();
8520 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8526 if (!TREE_OPERAND (exp
, 0))
8527 expand_null_return ();
8529 expand_return (TREE_OPERAND (exp
, 0));
8532 case PREINCREMENT_EXPR
:
8533 case PREDECREMENT_EXPR
:
8534 return expand_increment (exp
, 0, ignore
);
8536 case POSTINCREMENT_EXPR
:
8537 case POSTDECREMENT_EXPR
:
8538 /* Faster to treat as pre-increment if result is not used. */
8539 return expand_increment (exp
, ! ignore
, ignore
);
8542 /* Are we taking the address of a nested function? */
8543 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8544 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8545 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8546 && ! TREE_STATIC (exp
))
8548 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8549 op0
= force_operand (op0
, target
);
8551 /* If we are taking the address of something erroneous, just
8553 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8555 /* If we are taking the address of a constant and are at the
8556 top level, we have to use output_constant_def since we can't
8557 call force_const_mem at top level. */
8559 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8560 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8562 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8565 /* We make sure to pass const0_rtx down if we came in with
8566 ignore set, to avoid doing the cleanups twice for something. */
8567 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8568 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8569 (modifier
== EXPAND_INITIALIZER
8570 ? modifier
: EXPAND_CONST_ADDRESS
));
8572 /* If we are going to ignore the result, OP0 will have been set
8573 to const0_rtx, so just return it. Don't get confused and
8574 think we are taking the address of the constant. */
8578 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8579 clever and returns a REG when given a MEM. */
8580 op0
= protect_from_queue (op0
, 1);
8582 /* We would like the object in memory. If it is a constant, we can
8583 have it be statically allocated into memory. For a non-constant,
8584 we need to allocate some memory and store the value into it. */
8586 if (CONSTANT_P (op0
))
8587 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8589 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8590 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8591 || GET_CODE (op0
) == PARALLEL
)
8593 /* If the operand is a SAVE_EXPR, we can deal with this by
8594 forcing the SAVE_EXPR into memory. */
8595 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
8597 put_var_into_stack (TREE_OPERAND (exp
, 0));
8598 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
8602 /* If this object is in a register, it can't be BLKmode. */
8603 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8604 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8606 if (GET_CODE (op0
) == PARALLEL
)
8607 /* Handle calls that pass values in multiple
8608 non-contiguous locations. The Irix 6 ABI has examples
8610 emit_group_store (memloc
, op0
,
8611 int_size_in_bytes (inner_type
));
8613 emit_move_insn (memloc
, op0
);
8619 if (GET_CODE (op0
) != MEM
)
8622 mark_temp_addr_taken (op0
);
8623 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8625 op0
= XEXP (op0
, 0);
8626 #ifdef POINTERS_EXTEND_UNSIGNED
8627 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8628 && mode
== ptr_mode
)
8629 op0
= convert_memory_address (ptr_mode
, op0
);
8634 /* If OP0 is not aligned as least as much as the type requires, we
8635 need to make a temporary, copy OP0 to it, and take the address of
8636 the temporary. We want to use the alignment of the type, not of
8637 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8638 the test for BLKmode means that can't happen. The test for
8639 BLKmode is because we never make mis-aligned MEMs with
8642 We don't need to do this at all if the machine doesn't have
8643 strict alignment. */
8644 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
8645 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
8647 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
8649 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8651 = assign_stack_temp_for_type
8652 (TYPE_MODE (inner_type
),
8653 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
8654 : int_size_in_bytes (inner_type
),
8655 1, build_qualified_type (inner_type
,
8656 (TYPE_QUALS (inner_type
)
8657 | TYPE_QUAL_CONST
)));
8659 if (TYPE_ALIGN_OK (inner_type
))
8662 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)));
8666 op0
= force_operand (XEXP (op0
, 0), target
);
8670 && GET_CODE (op0
) != REG
8671 && modifier
!= EXPAND_CONST_ADDRESS
8672 && modifier
!= EXPAND_INITIALIZER
8673 && modifier
!= EXPAND_SUM
)
8674 op0
= force_reg (Pmode
, op0
);
8676 if (GET_CODE (op0
) == REG
8677 && ! REG_USERVAR_P (op0
))
8678 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8680 #ifdef POINTERS_EXTEND_UNSIGNED
8681 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8682 && mode
== ptr_mode
)
8683 op0
= convert_memory_address (ptr_mode
, op0
);
8688 case ENTRY_VALUE_EXPR
:
8691 /* COMPLEX type for Extended Pascal & Fortran */
8694 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8697 /* Get the rtx code of the operands. */
8698 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8699 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8702 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8706 /* Move the real (op0) and imaginary (op1) parts to their location. */
8707 emit_move_insn (gen_realpart (mode
, target
), op0
);
8708 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8710 insns
= get_insns ();
8713 /* Complex construction should appear as a single unit. */
8714 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8715 each with a separate pseudo as destination.
8716 It's not correct for flow to treat them as a unit. */
8717 if (GET_CODE (target
) != CONCAT
)
8718 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8726 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8727 return gen_realpart (mode
, op0
);
8730 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8731 return gen_imagpart (mode
, op0
);
8735 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8739 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8742 target
= gen_reg_rtx (mode
);
8746 /* Store the realpart and the negated imagpart to target. */
8747 emit_move_insn (gen_realpart (partmode
, target
),
8748 gen_realpart (partmode
, op0
));
8750 imag_t
= gen_imagpart (partmode
, target
);
8751 temp
= expand_unop (partmode
,
8752 ! unsignedp
&& flag_trapv
8753 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8754 ? negv_optab
: neg_optab
,
8755 gen_imagpart (partmode
, op0
), imag_t
, 0);
8757 emit_move_insn (imag_t
, temp
);
8759 insns
= get_insns ();
8762 /* Conjugate should appear as a single unit
8763 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8764 each with a separate pseudo as destination.
8765 It's not correct for flow to treat them as a unit. */
8766 if (GET_CODE (target
) != CONCAT
)
8767 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8774 case TRY_CATCH_EXPR
:
8776 tree handler
= TREE_OPERAND (exp
, 1);
8778 expand_eh_region_start ();
8780 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8782 expand_eh_region_end_cleanup (handler
);
8787 case TRY_FINALLY_EXPR
:
8789 tree try_block
= TREE_OPERAND (exp
, 0);
8790 tree finally_block
= TREE_OPERAND (exp
, 1);
8791 rtx finally_label
= gen_label_rtx ();
8792 rtx done_label
= gen_label_rtx ();
8793 rtx return_link
= gen_reg_rtx (Pmode
);
8794 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8795 (tree
) finally_label
, (tree
) return_link
);
8796 TREE_SIDE_EFFECTS (cleanup
) = 1;
8798 /* Start a new binding layer that will keep track of all cleanup
8799 actions to be performed. */
8800 expand_start_bindings (2);
8802 target_temp_slot_level
= temp_slot_level
;
8804 expand_decl_cleanup (NULL_TREE
, cleanup
);
8805 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8807 preserve_temp_slots (op0
);
8808 expand_end_bindings (NULL_TREE
, 0, 0);
8809 emit_jump (done_label
);
8810 emit_label (finally_label
);
8811 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8812 emit_indirect_jump (return_link
);
8813 emit_label (done_label
);
8817 case GOTO_SUBROUTINE_EXPR
:
8819 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8820 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8821 rtx return_address
= gen_label_rtx ();
8822 emit_move_insn (return_link
,
8823 gen_rtx_LABEL_REF (Pmode
, return_address
));
8825 emit_label (return_address
);
8830 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8833 return get_exception_pointer (cfun
);
8836 /* Function descriptors are not valid except for as
8837 initialization constants, and should not be expanded. */
8841 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
8844 /* Here to do an ordinary binary operator, generating an instruction
8845 from the optab already placed in `this_optab'. */
8847 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8849 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8850 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8852 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
8853 unsignedp
, OPTAB_LIB_WIDEN
);
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (offset, exp)
     tree offset;
     tree exp;
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR
	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
	  && (TREE_OPERAND (offset, 0) == exp
	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
		      == TREE_TYPE (exp)))));
}
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
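/* Illustration (hypothetical trees, not from this file): for an argument
   representing `"hello" + 2' (a PLUS_EXPR of an ADDR_EXPR of a STRING_CST
   and the constant 2), the STRING_CST for "hello" is returned and
   *PTR_OFFSET is set to a sizetype 2; for a plain address of a STRING_CST
   the offset is size_zero_node.  */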
tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
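/* Illustration (hypothetical C source, not from this file): for `y = x++;'
   the old value of X is returned and the increment is queued, while for
   `++x', or when the value of `x++' is ignored, the increment is performed
   first and the new value is the result.  */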
8957 expand_increment (exp
, post
, ignore
)
8963 tree incremented
= TREE_OPERAND (exp
, 0);
8964 optab this_optab
= add_optab
;
8966 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
8967 int op0_is_copy
= 0;
8968 int single_insn
= 0;
8969 /* 1 means we can't store into OP0 directly,
8970 because it is a subreg narrower than a word,
8971 and we don't dare clobber the rest of the word. */
8974 /* Stabilize any component ref that might need to be
8975 evaluated more than once below. */
8977 || TREE_CODE (incremented
) == BIT_FIELD_REF
8978 || (TREE_CODE (incremented
) == COMPONENT_REF
8979 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
8980 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
8981 incremented
= stabilize_reference (incremented
);
8982 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8983 ones into save exprs so that they don't accidentally get evaluated
8984 more than once by the code below. */
8985 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
8986 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
8987 incremented
= save_expr (incremented
);
  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */
8994 temp
= get_last_insn ();
8995 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
8997 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8998 in place but instead must do sign- or zero-extension during assignment,
8999 so we copy it into a new register and let the code below use it as
9002 Note that we can safely modify this SUBREG since it is know not to be
9003 shared (it was made by the expand_expr call above). */
9005 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9008 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9012 else if (GET_CODE (op0
) == SUBREG
9013 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9015 /* We cannot increment this SUBREG in place. If we are
9016 post-incrementing, get a copy of the old value. Otherwise,
9017 just mark that we cannot increment in place. */
9019 op0
= copy_to_reg (op0
);
9024 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9025 && temp
!= get_last_insn ());
9026 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9028 /* Decide whether incrementing or decrementing. */
9029 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9030 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9031 this_optab
= sub_optab
;
9033 /* Convert decrement by a constant into a negative increment. */
9034 if (this_optab
== sub_optab
9035 && GET_CODE (op1
) == CONST_INT
)
9037 op1
= GEN_INT (-INTVAL (op1
));
9038 this_optab
= add_optab
;
9041 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9042 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9044 /* For a preincrement, see if we can do this with a single instruction. */
9047 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9048 if (icode
!= (int) CODE_FOR_nothing
9049 /* Make sure that OP0 is valid for operands 0 and 1
9050 of the insn we want to queue. */
9051 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9052 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9053 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9057 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9058 then we cannot just increment OP0. We must therefore contrive to
9059 increment the original value. Then, for postincrement, we can return
9060 OP0 since it is a copy of the old value. For preincrement, expand here
9061 unless we can do it with a single insn.
9063 Likewise if storing directly into OP0 would clobber high bits
9064 we need to preserve (bad_subreg). */
9065 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9067 /* This is the easiest way to increment the value wherever it is.
9068 Problems with multiple evaluation of INCREMENTED are prevented
9069 because either (1) it is a component_ref or preincrement,
9070 in which case it was stabilized above, or (2) it is an array_ref
9071 with constant index in an array in a register, which is
9072 safe to reevaluate. */
9073 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9074 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9075 ? MINUS_EXPR
: PLUS_EXPR
),
9078 TREE_OPERAND (exp
, 1));
9080 while (TREE_CODE (incremented
) == NOP_EXPR
9081 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9083 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9084 incremented
= TREE_OPERAND (incremented
, 0);
9087 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9088 return post
? op0
: temp
;
9093 /* We have a true reference to the value in OP0.
9094 If there is an insn to add or subtract in this mode, queue it.
9095 Queueing the increment insn avoids the register shuffling
9096 that often results if we must increment now and first save
9097 the old value for subsequent use. */
9099 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9100 op0
= stabilize (op0
);
9103 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9104 if (icode
!= (int) CODE_FOR_nothing
9105 /* Make sure that OP0 is valid for operands 0 and 1
9106 of the insn we want to queue. */
9107 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9108 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9110 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9111 op1
= force_reg (mode
, op1
);
9113 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9115 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9117 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9118 ? force_reg (Pmode
, XEXP (op0
, 0))
9119 : copy_to_reg (XEXP (op0
, 0)));
9122 op0
= replace_equiv_address (op0
, addr
);
9123 temp
= force_reg (GET_MODE (op0
), op0
);
9124 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9125 op1
= force_reg (mode
, op1
);
	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
9129 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9130 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9135 /* Preincrement, or we can't increment with one simple insn. */
9137 /* Save a copy of the value before inc or dec, to return it later. */
9138 temp
= value
= copy_to_reg (op0
);
9140 /* Arrange to return the incremented value. */
9141 /* Copy the rtx because expand_binop will protect from the queue,
9142 and the results of that would be invalid for us to return
9143 if our caller does emit_queue before using our result. */
9144 temp
= copy_rtx (value
= op0
);
9146 /* Increment however we can. */
9147 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9148 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9150 /* Make sure the value is stored into OP0. */
9152 emit_move_insn (op0
, op1
);
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
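/* Usage sketch (hypothetical caller, for illustration only): expanding
   `if (cond) stmt;' typically emits

       rtx else_label = gen_label_rtx ();
       do_jump (cond, else_label, NULL_RTX);    -- jump to ELSE_LABEL if COND is zero
       ... expand STMT ...
       emit_label (else_label);

   and when COND is `a && b', do_jump itself splits it into two conditional
   jumps instead of computing a 0/1 value for the whole condition.  */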
void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
9252 #ifdef MAX_INTEGER_COMPUTATION_MODE
9253 check_max_integer_computation_mode (exp
);
9264 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9270 /* This is not true with #pragma weak */
9272 /* The address of something can never be zero. */
9274 emit_jump (if_true_label
);
9279 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9280 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9281 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
9282 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
9285 /* If we are narrowing the operand, we have to do the compare in the
9287 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9288 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9290 case NON_LVALUE_EXPR
:
9291 case REFERENCE_EXPR
:
9296 /* These cannot change zero->non-zero or vice versa. */
9297 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9300 case WITH_RECORD_EXPR
:
9301 /* Put the object on the placeholder list, recurse through our first
9302 operand, and pop the list. */
9303 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9305 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9306 placeholder_list
= TREE_CHAIN (placeholder_list
);
9310 /* This is never less insns than evaluating the PLUS_EXPR followed by
9311 a test and can be longer if the test is eliminated. */
9313 /* Reduce to minus. */
9314 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9315 TREE_OPERAND (exp
, 0),
9316 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9317 TREE_OPERAND (exp
, 1))));
9318 /* Process as MINUS. */
9322 /* Non-zero iff operands of minus differ. */
9323 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9324 TREE_OPERAND (exp
, 0),
9325 TREE_OPERAND (exp
, 1)),
9326 NE
, NE
, if_false_label
, if_true_label
);
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
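      /* Worked example (hypothetical): for `x & 0x80' with X an int,
	 tree_floor_log2 of the constant is 7, so an 8-bit mode (QImode on
	 most targets) suffices and the test is done on the low byte only,
	 which is exactly the sign bit of the narrower object.  */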
9337 if (! SLOW_BYTE_ACCESS
9338 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9339 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9340 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9341 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9342 && (type
= (*lang_hooks
.types
.type_for_mode
) (mode
, 1)) != 0
9343 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9344 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9345 != CODE_FOR_nothing
))
9347 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9352 case TRUTH_NOT_EXPR
:
9353 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9356 case TRUTH_ANDIF_EXPR
:
9357 if (if_false_label
== 0)
9358 if_false_label
= drop_through_label
= gen_label_rtx ();
9359 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9360 start_cleanup_deferral ();
9361 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9362 end_cleanup_deferral ();
9365 case TRUTH_ORIF_EXPR
:
9366 if (if_true_label
== 0)
9367 if_true_label
= drop_through_label
= gen_label_rtx ();
9368 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9369 start_cleanup_deferral ();
9370 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9371 end_cleanup_deferral ();
9376 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9377 preserve_temp_slots (NULL_RTX
);
9381 do_pending_stack_adjust ();
9382 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9388 case ARRAY_RANGE_REF
:
9390 HOST_WIDE_INT bitsize
, bitpos
;
9392 enum machine_mode mode
;
9397 /* Get description of this reference. We don't actually care
9398 about the underlying object here. */
9399 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9400 &unsignedp
, &volatilep
);
9402 type
= (*lang_hooks
.types
.type_for_size
) (bitsize
, unsignedp
);
9403 if (! SLOW_BYTE_ACCESS
9404 && type
!= 0 && bitsize
>= 0
9405 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9406 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9407 != CODE_FOR_nothing
))
9409 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9416 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9417 if (integer_onep (TREE_OPERAND (exp
, 1))
9418 && integer_zerop (TREE_OPERAND (exp
, 2)))
9419 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9421 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9422 && integer_onep (TREE_OPERAND (exp
, 2)))
9423 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9427 rtx label1
= gen_label_rtx ();
9428 drop_through_label
= gen_label_rtx ();
9430 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9432 start_cleanup_deferral ();
9433 /* Now the THEN-expression. */
9434 do_jump (TREE_OPERAND (exp
, 1),
9435 if_false_label
? if_false_label
: drop_through_label
,
9436 if_true_label
? if_true_label
: drop_through_label
);
9437 /* In case the do_jump just above never jumps. */
9438 do_pending_stack_adjust ();
9439 emit_label (label1
);
9441 /* Now the ELSE-expression. */
9442 do_jump (TREE_OPERAND (exp
, 2),
9443 if_false_label
? if_false_label
: drop_through_label
,
9444 if_true_label
? if_true_label
: drop_through_label
);
9445 end_cleanup_deferral ();
9451 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9453 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9454 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9456 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9457 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9460 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9461 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9462 fold (build1 (REALPART_EXPR
,
9463 TREE_TYPE (inner_type
),
9465 fold (build1 (REALPART_EXPR
,
9466 TREE_TYPE (inner_type
),
9468 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9469 fold (build1 (IMAGPART_EXPR
,
9470 TREE_TYPE (inner_type
),
9472 fold (build1 (IMAGPART_EXPR
,
9473 TREE_TYPE (inner_type
),
9475 if_false_label
, if_true_label
);
9478 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9479 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9481 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9482 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9483 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9485 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9491 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9493 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9494 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9496 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9497 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9500 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9501 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9502 fold (build1 (REALPART_EXPR
,
9503 TREE_TYPE (inner_type
),
9505 fold (build1 (REALPART_EXPR
,
9506 TREE_TYPE (inner_type
),
9508 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9509 fold (build1 (IMAGPART_EXPR
,
9510 TREE_TYPE (inner_type
),
9512 fold (build1 (IMAGPART_EXPR
,
9513 TREE_TYPE (inner_type
),
9515 if_false_label
, if_true_label
);
9518 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9519 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9521 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9522 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9523 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9525 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
9530 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9531 if (GET_MODE_CLASS (mode
) == MODE_INT
9532 && ! can_compare_p (LT
, mode
, ccp_jump
))
9533 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9535 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
9539 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9540 if (GET_MODE_CLASS (mode
) == MODE_INT
9541 && ! can_compare_p (LE
, mode
, ccp_jump
))
9542 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9544 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9548 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9549 if (GET_MODE_CLASS (mode
) == MODE_INT
9550 && ! can_compare_p (GT
, mode
, ccp_jump
))
9551 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9553 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9557 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9558 if (GET_MODE_CLASS (mode
) == MODE_INT
9559 && ! can_compare_p (GE
, mode
, ccp_jump
))
9560 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9562 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9565 case UNORDERED_EXPR
:
9568 enum rtx_code cmp
, rcmp
;
9571 if (code
== UNORDERED_EXPR
)
9572 cmp
= UNORDERED
, rcmp
= ORDERED
;
9574 cmp
= ORDERED
, rcmp
= UNORDERED
;
9575 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9578 if (! can_compare_p (cmp
, mode
, ccp_jump
)
9579 && (can_compare_p (rcmp
, mode
, ccp_jump
)
9580 /* If the target doesn't provide either UNORDERED or ORDERED
9581 comparisons, canonicalize on UNORDERED for the library. */
9582 || rcmp
== UNORDERED
))
9586 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
9588 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
9593 enum rtx_code rcode1
;
9594 enum tree_code tcode2
;
9618 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9619 if (can_compare_p (rcode1
, mode
, ccp_jump
))
9620 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
9624 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
9625 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
9628 /* If the target doesn't support combined unordered
9629 compares, decompose into UNORDERED + comparison. */
9630 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
9631 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
9632 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
9633 do_jump (exp
, if_false_label
, if_true_label
);
9639 __builtin_expect (<test>, 0) and
9640 __builtin_expect (<test>, 1)
9642 We need to do this here, so that <test> is not converted to a SCC
9643 operation on machines that use condition code registers and COMPARE
9644 like the PowerPC, and then the jump is done based on whether the SCC
9645 operation produced a 1 or 0. */
9647 /* Check for a built-in function. */
9648 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
9650 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
9651 tree arglist
= TREE_OPERAND (exp
, 1);
9653 if (TREE_CODE (fndecl
) == FUNCTION_DECL
9654 && DECL_BUILT_IN (fndecl
)
9655 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
9656 && arglist
!= NULL_TREE
9657 && TREE_CHAIN (arglist
) != NULL_TREE
)
9659 rtx seq
= expand_builtin_expect_jump (exp
, if_false_label
,
9662 if (seq
!= NULL_RTX
)
9669 /* fall through and generate the normal code. */
9673 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
9675 /* This is not needed any more and causes poor code since it causes
9676 comparisons and tests from non-SI objects to have different code
9678 /* Copy to register to avoid generating bad insns by cse
9679 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9680 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9681 temp
= copy_to_reg (temp
);
9683 do_pending_stack_adjust ();
9684 /* Do any postincrements in the expression that was tested. */
9687 if (GET_CODE (temp
) == CONST_INT
9688 || (GET_CODE (temp
) == CONST_DOUBLE
&& GET_MODE (temp
) == VOIDmode
)
9689 || GET_CODE (temp
) == LABEL_REF
)
9691 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
9695 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
9696 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
9697 /* Note swapping the labels gives us not-equal. */
9698 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
9699 else if (GET_MODE (temp
) != VOIDmode
)
9700 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
9701 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9702 GET_MODE (temp
), NULL_RTX
,
9703 if_false_label
, if_true_label
);
9708 if (drop_through_label
)
9710 /* If do_jump produces code that might be jumped around,
9711 do any stack adjusts from that code, before the place
9712 where control merges in. */
9713 do_pending_stack_adjust ();
9714 emit_label (drop_through_label
);
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
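/* Illustration (hypothetical operands): for a DImode comparison on a
   32-bit target with no DImode compare insn, the loop below first compares
   the high words with GT (signed or unsigned as requested) and jumps if
   they already decide the result; only when the high words are equal does
   it go on to the low words, which are always compared unsigned.  */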
void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
			      if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */
static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */
void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;
  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
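  /* Sketch of the idea (hypothetical operands): for a 64-bit OP0 on a
     32-bit target the loop computes

	 part = low_word | high_word;

     and a single word-sized comparison of PART against zero then selects
     the label, instead of testing each word with its own compare and
     branch.  */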
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */
9883 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
)
9887 enum machine_mode mode
;
9892 /* If one operand is constant, make it the second one. Only do this
9893 if the other operand is not constant as well. */
9895 if (swap_commutative_operands_p (op0
, op1
))
9900 code
= swap_condition (code
);
9905 op0
= force_not_mem (op0
);
9906 op1
= force_not_mem (op1
);
9909 do_pending_stack_adjust ();
9911 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
9912 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
9916 /* There's no need to do this now that combine.c can eliminate lots of
9917 sign extensions. This can be less efficient in certain cases on other
9920 /* If this is a signed equality comparison, we can do it as an
9921 unsigned comparison since zero-extension is cheaper than sign
9922 extension and comparisons with zero are done as unsigned. This is
9923 the case even on machines that can do fast sign extension, since
9924 zero-extension is easier to combine with other operations than
9925 sign-extension is. If we are comparing against a constant, we must
9926 convert it to what it would look like unsigned. */
9927 if ((code
== EQ
|| code
== NE
) && ! unsignedp
9928 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
9930 if (GET_CODE (op1
) == CONST_INT
9931 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
9932 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
9937 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
);
9939 return gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
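
/* Illustrative sketch, not compiler code: when only IF_FALSE_LABEL is
   supplied, the reversal above turns "branch to if_false_label unless
   op0 < op1" into "branch to if_false_label if op0 >= op1", e.g.

       code = LT,  if_true_label = 0,  if_false_label = L
   becomes
       code = GE,  if_true_label = L,  if_false_label = 0

   It is skipped for floating-point modes because reversing a comparison is
   not NaN-safe there: !(a < b) is not equivalent to a >= b.  */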

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  tree type;
  rtx op0, op1;
  int unsignedp;
  enum machine_mode mode;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
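
/* Illustrative sketch, not compiler code: for a LT_EXPR the caller typically
   passes SIGNED_CODE = LT and UNSIGNED_CODE = LTU, and the signedness of the
   operand type picks the rtx code used above, e.g.

       int a, b;        a < b  ->  code = LT   (signed compare)
       unsigned a, b;   a < b  ->  code = LTU  (unsigned compare)  */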

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
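
  /* Illustrative sketch, not compiler code: for signed operands the
     conversions above rewrite

         x <  1    as  x <= 0
         x <= -1   as  x <  0
         x >  -1   as  x >= 0
         x >= 1    as  x >  0

     so the special cases below only need to recognize comparisons
     against zero.  */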

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
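
/* Illustrative sketch, not compiler code: when emit_store_flag cannot handle
   CODE directly, the fallback above emits the equivalent of

       target = 1;                  (or 0 when inverting)
       if (op0 <code> op1) goto L;
       target = 0;                  (or 1 when inverting)
     L:

   i.e. a set/jump/set sequence built from an ordinary conditional branch.  */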

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
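
/* Illustrative sketch, not compiler code: because the caller has already
   subtracted the lowest case value, the single unsigned comparison above
   performs both bounds checks at once:

       if ((unsigned) (index - minval) > (unsigned) (maxval - minval))
         goto default_label;

   an index below MINVAL wraps to a large unsigned value and fails the same
   test as one above MAXVAL.  The dispatch address is then computed as
   table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE).  */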

int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}