/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;
static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx,
					 rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
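
/* Illustrative only (not part of the original sources): a rough sense of how
   this heuristic plays out.  Assume a 32-bit target where MOVE_MAX is 4 and
   MOVE_RATIO is 15.  Copying a 16-byte, word-aligned structure needs
   move_by_pieces_ninsns (16, 32) == 4 word-sized moves; since 4 < 15,
   MOVE_BY_PIECES_P is true and emit_block_move (below) expands the copy
   inline with move_by_pieces instead of a movstr pattern or a memcpy call.  */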
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
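
/* A minimal usage sketch (illustrative only; the variable names are
   hypothetical):

     rtx op0 = protect_from_queue (x, 0);    X is only read
     rtx op1 = protect_from_queue (y, 1);    Y will be modified
     emit_move_insn (op1, op0);
     emit_queue ();                          flush pending increments

   The point is that every rtx which might be a QUEUED is passed through
   protect_from_queue immediately before being placed in an insn, and the
   result is not cached across a call to emit_queue.  */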
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
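
/* Illustrative sketch (not part of the original sources): widening a QImode
   pseudo into an existing SImode pseudo, with zero-extension because
   UNSIGNEDP is nonzero.

     rtx src = gen_reg_rtx (QImode);
     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);    emits a zero_extend or its equivalent  */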
499 convert_move (to
, from
, unsignedp
)
503 enum machine_mode to_mode
= GET_MODE (to
);
504 enum machine_mode from_mode
= GET_MODE (from
);
505 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
506 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
510 /* rtx code for making an equivalent value. */
511 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
513 to
= protect_from_queue (to
, 1);
514 from
= protect_from_queue (from
, 0);
516 if (to_real
!= from_real
)
519 /* If FROM is a SUBREG that indicates that we have already done at least
520 the required extension, strip it. We don't handle such SUBREGs as
523 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
524 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
525 >= GET_MODE_SIZE (to_mode
))
526 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
527 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
529 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
532 if (to_mode
== from_mode
533 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
535 emit_move_insn (to
, from
);
539 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
541 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
544 if (VECTOR_MODE_P (to_mode
))
545 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
547 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
549 emit_move_insn (to
, from
);
553 if (to_real
!= from_real
)
560 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
562 /* Try converting directly if the insn is supported. */
563 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
566 emit_unop_insn (code
, to
, from
, UNKNOWN
);
571 #ifdef HAVE_trunchfqf2
572 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
574 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
578 #ifdef HAVE_trunctqfqf2
579 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
581 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
585 #ifdef HAVE_truncsfqf2
586 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
588 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
592 #ifdef HAVE_truncdfqf2
593 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
595 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
599 #ifdef HAVE_truncxfqf2
600 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
602 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
606 #ifdef HAVE_trunctfqf2
607 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
609 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
614 #ifdef HAVE_trunctqfhf2
615 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
617 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
621 #ifdef HAVE_truncsfhf2
622 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
624 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
628 #ifdef HAVE_truncdfhf2
629 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
631 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
635 #ifdef HAVE_truncxfhf2
636 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
638 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
642 #ifdef HAVE_trunctfhf2
643 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
645 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
650 #ifdef HAVE_truncsftqf2
651 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
653 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
657 #ifdef HAVE_truncdftqf2
658 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
660 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
664 #ifdef HAVE_truncxftqf2
665 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
667 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
671 #ifdef HAVE_trunctftqf2
672 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
674 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
679 #ifdef HAVE_truncdfsf2
680 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
682 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
686 #ifdef HAVE_truncxfsf2
687 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
689 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
693 #ifdef HAVE_trunctfsf2
694 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
696 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
700 #ifdef HAVE_truncxfdf2
701 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
703 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
707 #ifdef HAVE_trunctfdf2
708 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
710 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
722 libcall
= extendsfdf2_libfunc
;
726 libcall
= extendsfxf2_libfunc
;
730 libcall
= extendsftf2_libfunc
;
742 libcall
= truncdfsf2_libfunc
;
746 libcall
= extenddfxf2_libfunc
;
750 libcall
= extenddftf2_libfunc
;
762 libcall
= truncxfsf2_libfunc
;
766 libcall
= truncxfdf2_libfunc
;
778 libcall
= trunctfsf2_libfunc
;
782 libcall
= trunctfdf2_libfunc
;
794 if (libcall
== (rtx
) 0)
795 /* This conversion is not implemented yet. */
799 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
801 insns
= get_insns ();
803 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
808 /* Now both modes are integers. */
810 /* Handle expanding beyond a word. */
811 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
812 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
819 enum machine_mode lowpart_mode
;
820 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
822 /* Try converting directly if the insn is supported. */
823 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
826 /* If FROM is a SUBREG, put it into a register. Do this
827 so that we always generate the same set of insns for
828 better cse'ing; if an intermediate assignment occurred,
829 we won't be doing the operation directly on the SUBREG. */
830 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
831 from
= force_reg (from_mode
, from
);
832 emit_unop_insn (code
, to
, from
, equiv_code
);
835 /* Next, try converting via full word. */
836 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
837 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
838 != CODE_FOR_nothing
))
840 if (GET_CODE (to
) == REG
)
841 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
842 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
843 emit_unop_insn (code
, to
,
844 gen_lowpart (word_mode
, to
), equiv_code
);
848 /* No special multiword conversion insn; do it by hand. */
851 /* Since we will turn this into a no conflict block, we must ensure
852 that the source does not overlap the target. */
854 if (reg_overlap_mentioned_p (to
, from
))
855 from
= force_reg (from_mode
, from
);
857 /* Get a copy of FROM widened to a word, if necessary. */
858 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
859 lowpart_mode
= word_mode
;
861 lowpart_mode
= from_mode
;
863 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
865 lowpart
= gen_lowpart (lowpart_mode
, to
);
866 emit_move_insn (lowpart
, lowfrom
);
868 /* Compute the value to put in each remaining word. */
870 fill_value
= const0_rtx
;
875 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
876 && STORE_FLAG_VALUE
== -1)
878 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
880 fill_value
= gen_reg_rtx (word_mode
);
881 emit_insn (gen_slt (fill_value
));
887 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
888 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
890 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
894 /* Fill the remaining words. */
895 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
897 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
898 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
903 if (fill_value
!= subword
)
904 emit_move_insn (subword
, fill_value
);
907 insns
= get_insns ();
910 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
911 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
915 /* Truncating multi-word to a word or less. */
916 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
917 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
919 if (!((GET_CODE (from
) == MEM
920 && ! MEM_VOLATILE_P (from
)
921 && direct_load
[(int) to_mode
]
922 && ! mode_dependent_address_p (XEXP (from
, 0)))
923 || GET_CODE (from
) == REG
924 || GET_CODE (from
) == SUBREG
))
925 from
= force_reg (from_mode
, from
);
926 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
930 /* Handle pointer conversion. */ /* SPEE 900220. */
931 if (to_mode
== PQImode
)
933 if (from_mode
!= QImode
)
934 from
= convert_to_mode (QImode
, from
, unsignedp
);
936 #ifdef HAVE_truncqipqi2
937 if (HAVE_truncqipqi2
)
939 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
942 #endif /* HAVE_truncqipqi2 */
946 if (from_mode
== PQImode
)
948 if (to_mode
!= QImode
)
950 from
= convert_to_mode (QImode
, from
, unsignedp
);
955 #ifdef HAVE_extendpqiqi2
956 if (HAVE_extendpqiqi2
)
958 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
961 #endif /* HAVE_extendpqiqi2 */
966 if (to_mode
== PSImode
)
968 if (from_mode
!= SImode
)
969 from
= convert_to_mode (SImode
, from
, unsignedp
);
971 #ifdef HAVE_truncsipsi2
972 if (HAVE_truncsipsi2
)
974 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
977 #endif /* HAVE_truncsipsi2 */
981 if (from_mode
== PSImode
)
983 if (to_mode
!= SImode
)
985 from
= convert_to_mode (SImode
, from
, unsignedp
);
990 #ifdef HAVE_extendpsisi2
991 if (! unsignedp
&& HAVE_extendpsisi2
)
993 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
996 #endif /* HAVE_extendpsisi2 */
997 #ifdef HAVE_zero_extendpsisi2
998 if (unsignedp
&& HAVE_zero_extendpsisi2
)
1000 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
1003 #endif /* HAVE_zero_extendpsisi2 */
1008 if (to_mode
== PDImode
)
1010 if (from_mode
!= DImode
)
1011 from
= convert_to_mode (DImode
, from
, unsignedp
);
1013 #ifdef HAVE_truncdipdi2
1014 if (HAVE_truncdipdi2
)
1016 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1019 #endif /* HAVE_truncdipdi2 */
1023 if (from_mode
== PDImode
)
1025 if (to_mode
!= DImode
)
1027 from
= convert_to_mode (DImode
, from
, unsignedp
);
1032 #ifdef HAVE_extendpdidi2
1033 if (HAVE_extendpdidi2
)
1035 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1038 #endif /* HAVE_extendpdidi2 */
1043 /* Now follow all the conversions between integers
1044 no more than a word long. */
1046 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1047 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1048 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1049 GET_MODE_BITSIZE (from_mode
)))
1051 if (!((GET_CODE (from
) == MEM
1052 && ! MEM_VOLATILE_P (from
)
1053 && direct_load
[(int) to_mode
]
1054 && ! mode_dependent_address_p (XEXP (from
, 0)))
1055 || GET_CODE (from
) == REG
1056 || GET_CODE (from
) == SUBREG
))
1057 from
= force_reg (from_mode
, from
);
1058 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1059 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1060 from
= copy_to_reg (from
);
1061 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1065 /* Handle extension. */
1066 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1068 /* Convert directly if that works. */
1069 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1070 != CODE_FOR_nothing
)
1072 emit_unop_insn (code
, to
, from
, equiv_code
);
1077 enum machine_mode intermediate
;
1081 /* Search for a mode to convert via. */
1082 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1083 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1084 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1085 != CODE_FOR_nothing
)
1086 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1087 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1088 GET_MODE_BITSIZE (intermediate
))))
1089 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1090 != CODE_FOR_nothing
))
1092 convert_move (to
, convert_to_mode (intermediate
, from
,
1093 unsignedp
), unsignedp
);
1097 /* No suitable intermediate mode.
1098 Generate what we need with shifts. */
1099 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1100 - GET_MODE_BITSIZE (from_mode
), 0);
1101 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1102 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1104 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1107 emit_move_insn (to
, tmp
);
1112 /* Support special truncate insns for certain modes. */
1114 if (from_mode
== DImode
&& to_mode
== SImode
)
1116 #ifdef HAVE_truncdisi2
1117 if (HAVE_truncdisi2
)
1119 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1123 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1127 if (from_mode
== DImode
&& to_mode
== HImode
)
1129 #ifdef HAVE_truncdihi2
1130 if (HAVE_truncdihi2
)
1132 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1136 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1140 if (from_mode
== DImode
&& to_mode
== QImode
)
1142 #ifdef HAVE_truncdiqi2
1143 if (HAVE_truncdiqi2
)
1145 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1149 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1153 if (from_mode
== SImode
&& to_mode
== HImode
)
1155 #ifdef HAVE_truncsihi2
1156 if (HAVE_truncsihi2
)
1158 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1162 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1166 if (from_mode
== SImode
&& to_mode
== QImode
)
1168 #ifdef HAVE_truncsiqi2
1169 if (HAVE_truncsiqi2
)
1171 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1175 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1179 if (from_mode
== HImode
&& to_mode
== QImode
)
1181 #ifdef HAVE_trunchiqi2
1182 if (HAVE_trunchiqi2
)
1184 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1188 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1192 if (from_mode
== TImode
&& to_mode
== DImode
)
1194 #ifdef HAVE_trunctidi2
1195 if (HAVE_trunctidi2
)
1197 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1201 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1205 if (from_mode
== TImode
&& to_mode
== SImode
)
1207 #ifdef HAVE_trunctisi2
1208 if (HAVE_trunctisi2
)
1210 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1214 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1218 if (from_mode
== TImode
&& to_mode
== HImode
)
1220 #ifdef HAVE_trunctihi2
1221 if (HAVE_trunctihi2
)
1223 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1227 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1231 if (from_mode
== TImode
&& to_mode
== QImode
)
1233 #ifdef HAVE_trunctiqi2
1234 if (HAVE_trunctiqi2
)
1236 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1240 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1244 /* Handle truncation of volatile memrefs, and so on;
1245 the things that couldn't be truncated directly,
1246 and for which there was no special instruction. */
1247 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1249 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1250 emit_move_insn (to
, temp
);
1254 /* Mode combination is not recognized. */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
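
/* Illustrative sketch (not part of the original sources): unlike
   convert_move, convert_modes returns a value instead of storing into an
   existing TO, and may hand back X itself (or a lowpart of it) when no real
   work is needed.  The variable names are hypothetical.

     rtx wide = convert_modes (SImode, HImode, x, 0);   sign-extended copy
     rtx same = convert_modes (SImode, SImode, x, 0);   returns X unchanged  */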
1291 convert_modes (mode
, oldmode
, x
, unsignedp
)
1292 enum machine_mode mode
, oldmode
;
1298 /* If FROM is a SUBREG that indicates that we have already done at least
1299 the required extension, strip it. */
1301 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1302 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1303 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1304 x
= gen_lowpart (mode
, x
);
1306 if (GET_MODE (x
) != VOIDmode
)
1307 oldmode
= GET_MODE (x
);
1309 if (mode
== oldmode
)
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
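
  /* A worked example of the case above (illustrative only): with
     HOST_BITS_PER_WIDE_INT == 32, converting (const_int -1) to an unsigned
     64-bit integer mode must produce the double-word constant
     0x00000000ffffffff, i.e. a zero high-order word, whereas gen_lowpart
     would yield all ones.  Hence the explicit zero high word passed to
     immed_double_const below.  */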
1318 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1319 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1320 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1322 HOST_WIDE_INT val
= INTVAL (x
);
1324 if (oldmode
!= VOIDmode
1325 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1327 int width
= GET_MODE_BITSIZE (oldmode
);
1329 /* We need to zero extend VAL. */
1330 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1333 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1336 /* We can do this with a gen_lowpart if both desired and current modes
1337 are integer, and this is either a constant integer, a register, or a
1338 non-volatile MEM. Except for the constant case where MODE is no
1339 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1341 if ((GET_CODE (x
) == CONST_INT
1342 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1343 || (GET_MODE_CLASS (mode
) == MODE_INT
1344 && GET_MODE_CLASS (oldmode
) == MODE_INT
1345 && (GET_CODE (x
) == CONST_DOUBLE
1346 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1347 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1348 && direct_load
[(int) mode
])
1349 || (GET_CODE (x
) == REG
1350 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1351 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1353 /* ?? If we don't know OLDMODE, we have to assume here that
1354 X does not need sign- or zero-extension. This may not be
1355 the case, but it's the best we can do. */
1356 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1357 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1359 HOST_WIDE_INT val
= INTVAL (x
);
1360 int width
= GET_MODE_BITSIZE (oldmode
);
1362 /* We must sign or zero-extend in this case. Start by
1363 zero-extending, then sign extend if we need to. */
1364 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1366 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1367 val
|= (HOST_WIDE_INT
) (-1) << width
;
1369 return GEN_INT (trunc_int_for_mode (val
, mode
));
1372 return gen_lowpart (mode
, x
);
1375 temp
= gen_reg_rtx (mode
);
1376 convert_move (temp
, x
, unsignedp
);
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
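
/* Illustrative only: the usual path into this routine is from
   emit_block_move (later in this file), roughly

     if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align);

   i.e. only compile-time-constant lengths that pass the MOVE_BY_PIECES_P
   heuristic are expanded as individual move insns.  */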
1401 move_by_pieces (to
, from
, len
, align
)
1403 unsigned HOST_WIDE_INT len
;
1406 struct move_by_pieces data
;
1407 rtx to_addr
, from_addr
= XEXP (from
, 0);
1408 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1409 enum machine_mode mode
= VOIDmode
, tmode
;
1410 enum insn_code icode
;
1413 data
.from_addr
= from_addr
;
1416 to_addr
= XEXP (to
, 0);
1419 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1420 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1422 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1429 #ifdef STACK_GROWS_DOWNWARD
1435 data
.to_addr
= to_addr
;
1438 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1439 || GET_CODE (from_addr
) == POST_INC
1440 || GET_CODE (from_addr
) == POST_DEC
);
1442 data
.explicit_inc_from
= 0;
1443 data
.explicit_inc_to
= 0;
1444 if (data
.reverse
) data
.offset
= len
;
1447 /* If copying requires more than two move insns,
1448 copy addresses to registers (to make displacements shorter)
1449 and use post-increment if available. */
1450 if (!(data
.autinc_from
&& data
.autinc_to
)
1451 && move_by_pieces_ninsns (len
, align
) > 2)
1453 /* Find the mode of the largest move... */
1454 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1455 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1456 if (GET_MODE_SIZE (tmode
) < max_size
)
1459 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1461 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1462 data
.autinc_from
= 1;
1463 data
.explicit_inc_from
= -1;
1465 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1467 data
.from_addr
= copy_addr_to_reg (from_addr
);
1468 data
.autinc_from
= 1;
1469 data
.explicit_inc_from
= 1;
1471 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1472 data
.from_addr
= copy_addr_to_reg (from_addr
);
1473 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1475 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1477 data
.explicit_inc_to
= -1;
1479 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1481 data
.to_addr
= copy_addr_to_reg (to_addr
);
1483 data
.explicit_inc_to
= 1;
1485 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1486 data
.to_addr
= copy_addr_to_reg (to_addr
);
1489 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1490 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1491 align
= MOVE_MAX
* BITS_PER_UNIT
;
1493 /* First move what we can in the largest integer mode, then go to
1494 successively smaller modes. */
1496 while (max_size
> 1)
1498 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1499 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1500 if (GET_MODE_SIZE (tmode
) < max_size
)
1503 if (mode
== VOIDmode
)
1506 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1507 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1508 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1510 max_size
= GET_MODE_SIZE (mode
);
1513 /* The code above should have handled everything. */
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
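
/* Worked example (illustrative only): on a 32-bit target with word-aligned
   operands, L == 10 decomposes into two SImode moves (8 bytes) plus one
   HImode move (2 bytes), so the result is 3.  With only byte alignment on a
   strict-alignment target, every piece is a QImode move and the result is
   10.  */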
1521 static unsigned HOST_WIDE_INT
1522 move_by_pieces_ninsns (l
, align
)
1523 unsigned HOST_WIDE_INT l
;
1526 unsigned HOST_WIDE_INT n_insns
= 0;
1527 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1529 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1530 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1531 align
= MOVE_MAX
* BITS_PER_UNIT
;
1533 while (max_size
> 1)
1535 enum machine_mode mode
= VOIDmode
, tmode
;
1536 enum insn_code icode
;
1538 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1539 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1540 if (GET_MODE_SIZE (tmode
) < max_size
)
1543 if (mode
== VOIDmode
)
1546 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1547 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1548 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1550 max_size
= GET_MODE_SIZE (mode
);
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
1563 move_by_pieces_1 (genfun
, mode
, data
)
1564 rtx (*genfun
) PARAMS ((rtx
, ...));
1565 enum machine_mode mode
;
1566 struct move_by_pieces
*data
;
1568 unsigned int size
= GET_MODE_SIZE (mode
);
1569 rtx to1
= NULL_RTX
, from1
;
1571 while (data
->len
>= size
)
1574 data
->offset
-= size
;
1578 if (data
->autinc_to
)
1579 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1582 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1585 if (data
->autinc_from
)
1586 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1589 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1591 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1592 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1593 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1594 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1597 emit_insn ((*genfun
) (to1
, from1
));
1600 #ifdef PUSH_ROUNDING
1601 emit_single_push_insn (mode
, from1
, NULL
);
1607 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1608 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1609 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1610 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1612 if (! data
->reverse
)
1613 data
->offset
+= size
;
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
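
/* Minimal usage sketch (illustrative only; the address registers are
   hypothetical).  Both operands must be BLKmode MEMs and SIZE an rtx,
   typically a CONST_INT:

     rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
     rtx src = gen_rtx_MEM (BLKmode, src_addr);
     emit_block_move (dst, src, GEN_INT (32));

   Depending on the constant and the target, the copy is expanded inline via
   move_by_pieces, through a movstr* pattern, or as a call to memcpy/bcopy.  */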
1632 emit_block_move (x
, y
, size
)
1637 #ifdef TARGET_MEM_FUNCTIONS
1639 tree call_expr
, arg_list
;
1641 unsigned int align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1643 if (GET_MODE (x
) != BLKmode
)
1646 if (GET_MODE (y
) != BLKmode
)
1649 x
= protect_from_queue (x
, 1);
1650 y
= protect_from_queue (y
, 0);
1651 size
= protect_from_queue (size
, 0);
1653 if (GET_CODE (x
) != MEM
)
1655 if (GET_CODE (y
) != MEM
)
1660 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1661 move_by_pieces (x
, y
, INTVAL (size
), align
);
1664 /* Try the most limited insn first, because there's no point
1665 including more than one in the machine description unless
1666 the more limited one has some advantage. */
1668 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1669 enum machine_mode mode
;
1671 /* Since this is a move insn, we don't care about volatility. */
1674 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1675 mode
= GET_MODE_WIDER_MODE (mode
))
1677 enum insn_code code
= movstr_optab
[(int) mode
];
1678 insn_operand_predicate_fn pred
;
1680 if (code
!= CODE_FOR_nothing
1681 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1682 here because if SIZE is less than the mode mask, as it is
1683 returned by the macro, it will definitely be less than the
1684 actual mode mask. */
1685 && ((GET_CODE (size
) == CONST_INT
1686 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1687 <= (GET_MODE_MASK (mode
) >> 1)))
1688 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1689 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1690 || (*pred
) (x
, BLKmode
))
1691 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1692 || (*pred
) (y
, BLKmode
))
1693 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1694 || (*pred
) (opalign
, VOIDmode
)))
1697 rtx last
= get_last_insn ();
1700 op2
= convert_to_mode (mode
, size
, 1);
1701 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1702 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1703 op2
= copy_to_mode_reg (mode
, op2
);
1705 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1713 delete_insns_since (last
);
1719 /* X, Y, or SIZE may have been passed through protect_from_queue.
1721 It is unsafe to save the value generated by protect_from_queue
1722 and reuse it later. Consider what happens if emit_queue is
1723 called before the return value from protect_from_queue is used.
1725 Expansion of the CALL_EXPR below will call emit_queue before
1726 we are finished emitting RTL for argument setup. So if we are
1727 not careful we could get the wrong value for an argument.
1729 To avoid this problem we go ahead and emit code to copy X, Y &
1730 SIZE into new pseudos. We can then place those new pseudos
1731 into an RTL_EXPR and use them later, even after a call to
1734 Note this is not strictly needed for library calls since they
1735 do not call emit_queue before loading their arguments. However,
1736 we may need to have library calls call emit_queue in the future
1737 since failing to do so could cause problems for targets which
1738 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1739 x
= copy_to_mode_reg (Pmode
, XEXP (x
, 0));
1740 y
= copy_to_mode_reg (Pmode
, XEXP (y
, 0));
1742 #ifdef TARGET_MEM_FUNCTIONS
1743 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
1745 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1746 TREE_UNSIGNED (integer_type_node
));
1747 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
1750 #ifdef TARGET_MEM_FUNCTIONS
1751 /* It is incorrect to use the libcall calling conventions to call
1752 memcpy in this context.
1754 This could be a user call to memcpy and the user may wish to
1755 examine the return value from memcpy.
1757 For targets where libcalls and normal calls have different conventions
1758 for returning pointers, we could end up generating incorrect code.
1760 So instead of using a libcall sequence we build up a suitable
1761 CALL_EXPR and expand the call in the normal fashion. */
1762 if (fn
== NULL_TREE
)
1766 /* This was copied from except.c, I don't know if all this is
1767 necessary in this context or not. */
1768 fn
= get_identifier ("memcpy");
1769 fntype
= build_pointer_type (void_type_node
);
1770 fntype
= build_function_type (fntype
, NULL_TREE
);
1771 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
1772 ggc_add_tree_root (&fn
, 1);
1773 DECL_EXTERNAL (fn
) = 1;
1774 TREE_PUBLIC (fn
) = 1;
1775 DECL_ARTIFICIAL (fn
) = 1;
1776 TREE_NOTHROW (fn
) = 1;
1777 make_decl_rtl (fn
, NULL
);
1778 assemble_external (fn
);
1781 /* We need to make an argument list for the function call.
1783 memcpy has three arguments, the first two are void * addresses and
1784 the last is a size_t byte count for the copy. */
1786 = build_tree_list (NULL_TREE
,
1787 make_tree (build_pointer_type (void_type_node
), x
));
1788 TREE_CHAIN (arg_list
)
1789 = build_tree_list (NULL_TREE
,
1790 make_tree (build_pointer_type (void_type_node
), y
));
1791 TREE_CHAIN (TREE_CHAIN (arg_list
))
1792 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
1793 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
1795 /* Now we have to build up the CALL_EXPR itself. */
1796 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1797 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1798 call_expr
, arg_list
, NULL_TREE
);
1799 TREE_SIDE_EFFECTS (call_expr
) = 1;
1801 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1803 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
1804 VOIDmode
, 3, y
, Pmode
, x
, Pmode
,
1805 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1806 TREE_UNSIGNED (integer_type_node
)),
1807 TYPE_MODE (integer_type_node
));
1810 /* If we are initializing a readonly value, show the above call
1811 clobbered it. Otherwise, a load from it may erroneously be hoisted
1813 if (RTX_UNCHANGING_P (x
))
1814 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
1820 /* Copy all or part of a value X into registers starting at REGNO.
1821 The number of registers to be filled is NREGS. */
1824 move_block_to_reg (regno
, x
, nregs
, mode
)
1828 enum machine_mode mode
;
1831 #ifdef HAVE_load_multiple
1839 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1840 x
= validize_mem (force_const_mem (mode
, x
));
1842 /* See if the machine can do this with a load multiple insn. */
1843 #ifdef HAVE_load_multiple
1844 if (HAVE_load_multiple
)
1846 last
= get_last_insn ();
1847 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1855 delete_insns_since (last
);
1859 for (i
= 0; i
< nregs
; i
++)
1860 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1861 operand_subword_force (x
, i
, mode
));
1864 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1865 The number of registers to be filled is NREGS. SIZE indicates the number
1866 of bytes in the object X. */
1869 move_block_from_reg (regno
, x
, nregs
, size
)
1876 #ifdef HAVE_store_multiple
1880 enum machine_mode mode
;
1885 /* If SIZE is that of a mode no bigger than a word, just use that
1886 mode's store operation. */
1887 if (size
<= UNITS_PER_WORD
1888 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
1890 emit_move_insn (adjust_address (x
, mode
, 0), gen_rtx_REG (mode
, regno
));
1894 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1895 to the left before storing to memory. Note that the previous test
1896 doesn't handle all cases (e.g. SIZE == 3). */
1897 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1899 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1905 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1906 gen_rtx_REG (word_mode
, regno
),
1907 build_int_2 ((UNITS_PER_WORD
- size
)
1908 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1909 emit_move_insn (tem
, shift
);
1913 /* See if the machine can do this with a store multiple insn. */
1914 #ifdef HAVE_store_multiple
1915 if (HAVE_store_multiple
)
1917 last
= get_last_insn ();
1918 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1926 delete_insns_since (last
);
1930 for (i
= 0; i
< nregs
; i
++)
1932 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1937 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1952 emit_group_load (dst
, orig_src
, ssize
)
1959 if (GET_CODE (dst
) != PARALLEL
)
1962 /* Check for a NULL entry, used to indicate that the parameter goes
1963 both on the stack and in registers. */
1964 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1969 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1971 /* Process the pieces. */
1972 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1974 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1975 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1976 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1979 /* Handle trailing fragments that run over the size of the struct. */
1980 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1982 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1983 bytelen
= ssize
- bytepos
;
1988 /* If we won't be loading directly from memory, protect the real source
1989 from strange tricks we might play; but make sure that the source can
1990 be loaded directly into the destination. */
1992 if (GET_CODE (orig_src
) != MEM
1993 && (!CONSTANT_P (orig_src
)
1994 || (GET_MODE (orig_src
) != mode
1995 && GET_MODE (orig_src
) != VOIDmode
)))
1997 if (GET_MODE (orig_src
) == VOIDmode
)
1998 src
= gen_reg_rtx (mode
);
2000 src
= gen_reg_rtx (GET_MODE (orig_src
));
2002 emit_move_insn (src
, orig_src
);
2005 /* Optimize the access just a bit. */
2006 if (GET_CODE (src
) == MEM
2007 && MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
)
2008 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2009 && bytelen
== GET_MODE_SIZE (mode
))
2011 tmps
[i
] = gen_reg_rtx (mode
);
2012 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2014 else if (GET_CODE (src
) == CONCAT
)
2017 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0))))
2018 tmps
[i
] = XEXP (src
, 0);
2019 else if (bytepos
== (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
2020 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 1))))
2021 tmps
[i
] = XEXP (src
, 1);
2022 else if (bytepos
== 0)
2024 rtx mem
= assign_stack_temp (GET_MODE (src
),
2025 GET_MODE_SIZE (GET_MODE (src
)), 0);
2026 emit_move_insn (mem
, src
);
2027 tmps
[i
] = adjust_address (mem
, mode
, 0);
2032 else if (CONSTANT_P (src
)
2033 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
2036 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2037 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2040 if (BYTES_BIG_ENDIAN
&& shift
)
2041 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2042 tmps
[i
], 0, OPTAB_WIDEN
);
2047 /* Copy the extracted pieces into the proper (probable) hard regs. */
2048 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2049 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
2052 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2053 registers represented by a PARALLEL. SSIZE represents the total size of
2054 block DST, or -1 if not known. */
2057 emit_group_store (orig_dst
, src
, ssize
)
2064 if (GET_CODE (src
) != PARALLEL
)
2067 /* Check for a NULL entry, used to indicate that the parameter goes
2068 both on the stack and in registers. */
2069 if (XEXP (XVECEXP (src
, 0, 0), 0))
2074 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2076 /* Copy the (probable) hard regs into pseudos. */
2077 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2079 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2080 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2081 emit_move_insn (tmps
[i
], reg
);
2085 /* If we won't be storing directly into memory, protect the real destination
2086 from strange tricks we might play. */
2088 if (GET_CODE (dst
) == PARALLEL
)
2092 /* We can get a PARALLEL dst if there is a conditional expression in
2093 a return statement. In that case, the dst and src are the same,
2094 so no action is necessary. */
2095 if (rtx_equal_p (dst
, src
))
2098 /* It is unclear if we can ever reach here, but we may as well handle
2099 it. Allocate a temporary, and split this into a store/load to/from
2102 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2103 emit_group_store (temp
, src
, ssize
);
2104 emit_group_load (dst
, temp
, ssize
);
2107 else if (GET_CODE (dst
) != MEM
)
2109 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2110 /* Make life a bit easier for combine. */
2111 emit_move_insn (dst
, const0_rtx
);
2114 /* Process the pieces. */
2115 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2117 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2118 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2119 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2121 /* Handle trailing fragments that run over the size of the struct. */
2122 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2124 if (BYTES_BIG_ENDIAN
)
2126 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2127 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2128 tmps
[i
], 0, OPTAB_WIDEN
);
2130 bytelen
= ssize
- bytepos
;
2133 /* Optimize the access just a bit. */
2134 if (GET_CODE (dst
) == MEM
2135 && MEM_ALIGN (dst
) >= GET_MODE_ALIGNMENT (mode
)
2136 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2137 && bytelen
== GET_MODE_SIZE (mode
))
2138 emit_move_insn (adjust_address (dst
, mode
, bytepos
), tmps
[i
]);
2140 store_bit_field (dst
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2141 mode
, tmps
[i
], ssize
);
2146 /* Copy from the pseudo into the (probable) hard reg. */
2147 if (GET_CODE (dst
) == REG
)
2148 emit_move_insn (orig_dst
, dst
);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */
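
/* Illustrative sketch (not part of the original sources): a caller that has
   a BLKmode aggregate of type TYPE returned in hard register SRCREG and
   needs it in addressable memory might simply do

     rtx blk = copy_blkmode_from_reg (NULL_RTX, srcreg, type);

   passing a null TGTBLK so that this routine allocates the stack
   temporary itself.  */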
2161 copy_blkmode_from_reg (tgtblk
, srcreg
, type
)
2166 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2167 rtx src
= NULL
, dst
= NULL
;
2168 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2169 unsigned HOST_WIDE_INT bitpos
, xbitpos
, big_endian_correction
= 0;
2173 tgtblk
= assign_temp (build_qualified_type (type
,
2175 | TYPE_QUAL_CONST
)),
2177 preserve_temp_slots (tgtblk
);
2180 /* This code assumes srcreg is at least a full word. If it isn't,
2181 copy it into a new pseudo which is a full word. */
2182 if (GET_MODE (srcreg
) != BLKmode
2183 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2184 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2186 /* Structures whose size is not a multiple of a word are aligned
2187 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2188 machine, this means we must skip the empty high order bytes when
2189 calculating the bit offset. */
2190 if (BYTES_BIG_ENDIAN
&& bytes
% UNITS_PER_WORD
)
2191 big_endian_correction
2192 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
2199 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2200 bitpos
< bytes
* BITS_PER_UNIT
;
2201 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2203 /* We need a new source operand each time xbitpos is on a
2204 word boundary and when xbitpos == big_endian_correction
2205 (the first time through). */
2206 if (xbitpos
% BITS_PER_WORD
== 0
2207 || xbitpos
== big_endian_correction
)
2208 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2211 /* We need a new destination operand each time bitpos is on
2213 if (bitpos
% BITS_PER_WORD
== 0)
2214 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2216 /* Use xbitpos for the source extraction (right justified) and
2217 xbitpos for the destination store (left justified). */
2218 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2219 extract_bit_field (src
, bitsize
,
2220 xbitpos
% BITS_PER_WORD
, 1,
2221 NULL_RTX
, word_mode
, word_mode
,
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = MOVE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */
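/* Usage sketch (editor's addition, not part of the original source; the
   callback name is hypothetical):

     static rtx
     example_constfun (data, offset, mode)
          PTR data;
          HOST_WIDE_INT offset;
          enum machine_mode mode;
     {
       return c_readstr ((const char *) data + offset, mode);
     }

     store_by_pieces (to, len, example_constfun, (PTR) str, align);

   can_store_by_pieces with the same arguments should normally be checked
   first, since store_by_pieces aborts when MOVE_BY_PIECES_P is false.  */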
void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if (!data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
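/* Example of the emitted sequence (editor's addition, not in the original
   source): on a 32-bit, little-endian target with no autoincrement
   addressing and sufficient alignment, clearing a 7-byte block proceeds as
   store_by_pieces_1 walks the modes SImode, HImode, QImode:

     (set (mem:SI to+0) (const_int 0))   ; 4 bytes
     (set (mem:HI to+4) (const_int 0))   ; 2 bytes
     (set (mem:QI to+6) (const_int 0))   ; 1 byte

   leaving data->len == 0 on exit.  */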
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && MOVE_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else
        {
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */

          rtx opalign = GEN_INT (align / BITS_PER_UNIT);
          enum machine_mode mode;

          for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
               mode = GET_MODE_WIDER_MODE (mode))
            {
              enum insn_code code = clrstr_optab[(int) mode];
              insn_operand_predicate_fn pred;

              if (code != CODE_FOR_nothing
                  /* We don't need MODE to be narrower than
                     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
                     the mode mask, as it is returned by the macro, it will
                     definitely be less than the actual mode mask.  */
                  && ((GET_CODE (size) == CONST_INT
                       && ((unsigned HOST_WIDE_INT) INTVAL (size)
                           <= (GET_MODE_MASK (mode) >> 1)))
                      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                      || (*pred) (object, BLKmode))
                  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
                      || (*pred) (opalign, VOIDmode)))
                {
                  rtx op1;
                  rtx last = get_last_insn ();
                  rtx pat;

                  op1 = convert_to_mode (mode, size, 1);
                  pred = insn_data[(int) code].operand[1].predicate;
                  if (pred != 0 && ! (*pred) (op1, mode))
                    op1 = copy_to_mode_reg (mode, op1);

                  pat = GEN_FCN ((int) code) (object, op1, opalign);
                  if (pat)
                    {
                      emit_insn (pat);
                      return 0;
                    }
                  else
                    delete_insns_since (last);
                }
            }
          /* OBJECT or SIZE may have been passed through protect_from_queue.

             It is unsafe to save the value generated by protect_from_queue
             and reuse it later.  Consider what happens if emit_queue is
             called before the return value from protect_from_queue is used.

             Expansion of the CALL_EXPR below will call emit_queue before
             we are finished emitting RTL for argument setup.  So if we are
             not careful we could get the wrong value for an argument.

             To avoid this problem we go ahead and emit code to copy OBJECT
             and SIZE into new pseudos.  We can then place those new pseudos
             into an RTL_EXPR and use them later, even after a call to
             emit_queue.

             Note this is not strictly needed for library calls since they
             do not call emit_queue before loading their arguments.  However,
             we may need to have library calls call emit_queue in the future
             since failing to do so could cause problems for targets which
             define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
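          /* Sketch of the hazard described above (editor's addition, not in
             the original source):

               addr = protect_from_queue (object, 1);
               ... expand_expr (call_expr, ...)   -> calls emit_queue ()
               use addr                           -> may see a stale address

             Copying XEXP (object, 0) and SIZE into fresh pseudos first, as
             done below, keeps the values valid across the emit_queue call.  */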
          object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
          size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
          size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                                  TREE_UNSIGNED (integer_type_node));
          size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
#ifdef TARGET_MEM_FUNCTIONS
          /* It is incorrect to use the libcall calling conventions to call
             memset in this context.

             This could be a user call to memset and the user may wish to
             examine the return value from memset.

             For targets where libcalls and normal calls have different
             conventions for returning pointers, we could end up generating
             incorrect code.

             So instead of using a libcall sequence we build up a suitable
             CALL_EXPR and expand the call in the normal fashion.  */
          if (fn == NULL_TREE)
            {
              tree fntype;

              /* This was copied from except.c, I don't know if all this is
                 necessary in this context or not.  */
              fn = get_identifier ("memset");
              fntype = build_pointer_type (void_type_node);
              fntype = build_function_type (fntype, NULL_TREE);
              fn = build_decl (FUNCTION_DECL, fn, fntype);
              ggc_add_tree_root (&fn, 1);
              DECL_EXTERNAL (fn) = 1;
              TREE_PUBLIC (fn) = 1;
              DECL_ARTIFICIAL (fn) = 1;
              TREE_NOTHROW (fn) = 1;
              make_decl_rtl (fn, NULL);
              assemble_external (fn);
            }

          /* We need to make an argument list for the function call.

             memset has three arguments: the first is a void * address, the
             second an integer with the initialization value, the last a
             size_t byte count.  */
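          /* In C terms the call being constructed is simply (editor's
             addition, not in the original source):

               memset ((void *) object, 0, (size_t) size);

             with the constant 0 supplied below as const0_rtx.  */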
          arg_list
            = build_tree_list (NULL_TREE,
                               make_tree (build_pointer_type (void_type_node),
                                          object));
          TREE_CHAIN (arg_list)
            = build_tree_list (NULL_TREE,
                               make_tree (integer_type_node, const0_rtx));
          TREE_CHAIN (TREE_CHAIN (arg_list))
            = build_tree_list (NULL_TREE, make_tree (sizetype, size));
          TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

          /* Now we have to build up the CALL_EXPR itself.  */
          call_expr = build1 (ADDR_EXPR,
                              build_pointer_type (TREE_TYPE (fn)), fn);
          call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                             call_expr, arg_list, NULL_TREE);
          TREE_SIDE_EFFECTS (call_expr) = 1;

          retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
          emit_library_call (bzero_libfunc, LCT_NORMAL,
                             VOIDmode, 2, object, Pmode, size,
                             TYPE_MODE (integer_type_node));
#endif

          /* If we are initializing a readonly value, show the above call
             clobbered it.  Otherwise, a load from it may erroneously be
             hoisted from a loop.  */
          if (RTX_UNCHANGING_P (object))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
        }
    }

  return retval;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
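/* Typical use (editor's addition, not in the original source; TMP is a
   hypothetical pseudo made with gen_reg_rtx):

     rtx tmp = gen_reg_rtx (SImode);
     rtx last = emit_move_insn (tmp, GEN_INT (42));

   When the source is a constant that had to be forced into memory, a
   REG_EQUAL note recording the original constant is attached to the
   returned insn, as done at the end of this function.  */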
rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    {
      y_cst = y;
      y = force_const_mem (mode, y);
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */
2788 emit_move_insn_1 (x
, y
)
2791 enum machine_mode mode
= GET_MODE (x
);
2792 enum machine_mode submode
;
2793 enum mode_class
class = GET_MODE_CLASS (mode
);
2796 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2799 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2801 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2803 /* Expand complex moves by moving real part and imag part, if possible. */
2804 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2805 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2807 (class == MODE_COMPLEX_INT
2808 ? MODE_INT
: MODE_FLOAT
),
2810 && (mov_optab
->handlers
[(int) submode
].insn_code
2811 != CODE_FOR_nothing
))
2813 /* Don't split destination if it is a stack push. */
2814 int stack
= push_operand (x
, GET_MODE (x
));
2816 #ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
2820 && PUSH_ROUNDING (GET_MODE_SIZE (submode
)) != GET_MODE_SIZE (submode
))
2823 int offset1
, offset2
;
2825 /* Do not use anti_adjust_stack, since we don't want to update
2826 stack_pointer_delta. */
2827 temp
= expand_binop (Pmode
,
2828 #ifdef STACK_GROWS_DOWNWARD
2835 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))),
2839 if (temp
!= stack_pointer_rtx
)
2840 emit_move_insn (stack_pointer_rtx
, temp
);
2841 #ifdef STACK_GROWS_DOWNWARD
2843 offset2
= GET_MODE_SIZE (submode
);
2845 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2846 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2847 + GET_MODE_SIZE (submode
));
2849 emit_move_insn (change_address (x
, submode
,
2850 gen_rtx_PLUS (Pmode
,
2852 GEN_INT (offset1
))),
2853 gen_realpart (submode
, y
));
2854 emit_move_insn (change_address (x
, submode
,
2855 gen_rtx_PLUS (Pmode
,
2857 GEN_INT (offset2
))),
2858 gen_imagpart (submode
, y
));
2862 /* If this is a stack, push the highpart first, so it
2863 will be in the argument order.
2865 In that case, change_address is used only to convert
2866 the mode, not to change the address. */
2869 /* Note that the real part always precedes the imag part in memory
2870 regardless of machine's endianness. */
2871 #ifdef STACK_GROWS_DOWNWARD
2872 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2873 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2874 gen_imagpart (submode
, y
)));
2875 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2876 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2877 gen_realpart (submode
, y
)));
2879 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2880 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2881 gen_realpart (submode
, y
)));
2882 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2883 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2884 gen_imagpart (submode
, y
)));
2889 rtx realpart_x
, realpart_y
;
2890 rtx imagpart_x
, imagpart_y
;
2892 /* If this is a complex value with each part being smaller than a
2893 word, the usual calling sequence will likely pack the pieces into
2894 a single register. Unfortunately, SUBREG of hard registers only
2895 deals in terms of words, so we have a problem converting input
2896 arguments to the CONCAT of two registers that is used elsewhere
2897 for complex values. If this is before reload, we can copy it into
2898 memory and reload. FIXME, we should see about using extract and
2899 insert on integer registers, but complex short and complex char
2900 variables should be rarely used. */
2901 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2902 && (reload_in_progress
| reload_completed
) == 0)
2904 int packed_dest_p
= (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2905 int packed_src_p
= (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2907 if (packed_dest_p
|| packed_src_p
)
2909 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2910 ? MODE_FLOAT
: MODE_INT
);
2912 enum machine_mode reg_mode
2913 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2915 if (reg_mode
!= BLKmode
)
2917 rtx mem
= assign_stack_temp (reg_mode
,
2918 GET_MODE_SIZE (mode
), 0);
2919 rtx cmem
= adjust_address (mem
, mode
, 0);
2922 = N_("function using short complex types cannot be inline");
2926 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2927 emit_move_insn_1 (cmem
, y
);
2928 return emit_move_insn_1 (sreg
, mem
);
2932 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2933 emit_move_insn_1 (mem
, sreg
);
2934 return emit_move_insn_1 (x
, cmem
);
2940 realpart_x
= gen_realpart (submode
, x
);
2941 realpart_y
= gen_realpart (submode
, y
);
2942 imagpart_x
= gen_imagpart (submode
, x
);
2943 imagpart_y
= gen_imagpart (submode
, y
);
2945 /* Show the output dies here. This is necessary for SUBREGs
2946 of pseudos since we cannot track their lifetimes correctly;
2947 hard regs shouldn't appear here except as return values.
2948 We never want to emit such a clobber after reload. */
2950 && ! (reload_in_progress
|| reload_completed
)
2951 && (GET_CODE (realpart_x
) == SUBREG
2952 || GET_CODE (imagpart_x
) == SUBREG
))
2954 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2957 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2958 (realpart_x
, realpart_y
));
2959 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2960 (imagpart_x
, imagpart_y
));
2963 return get_last_insn ();
2966 /* This will handle any multi-word mode that lacks a move_insn pattern.
2967 However, you will get better code if you define such patterns,
2968 even if they must turn into multiple assembler instructions. */
2969 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2975 #ifdef PUSH_ROUNDING
2977 /* If X is a push on the stack, do the push now and replace
2978 X with a reference to the stack pointer. */
2979 if (push_operand (x
, GET_MODE (x
)))
2984 /* Do not use anti_adjust_stack, since we don't want to update
2985 stack_pointer_delta. */
2986 temp
= expand_binop (Pmode
,
2987 #ifdef STACK_GROWS_DOWNWARD
2994 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))),
2998 if (temp
!= stack_pointer_rtx
)
2999 emit_move_insn (stack_pointer_rtx
, temp
);
3001 code
= GET_CODE (XEXP (x
, 0));
3002 /* Just hope that small offsets off SP are OK. */
3003 if (code
== POST_INC
)
3004 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3005 GEN_INT (-(HOST_WIDE_INT
)
3006 GET_MODE_SIZE (GET_MODE (x
))));
3007 else if (code
== POST_DEC
)
3008 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3009 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3011 temp
= stack_pointer_rtx
;
3013 x
= change_address (x
, VOIDmode
, temp
);
3017 /* If we are in reload, see if either operand is a MEM whose address
3018 is scheduled for replacement. */
3019 if (reload_in_progress
&& GET_CODE (x
) == MEM
3020 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3021 x
= replace_equiv_address_nv (x
, inner
);
3022 if (reload_in_progress
&& GET_CODE (y
) == MEM
3023 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3024 y
= replace_equiv_address_nv (y
, inner
);
3030 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3033 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3034 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3036 /* If we can't get a part of Y, put Y into memory if it is a
3037 constant. Otherwise, force it into a register. If we still
3038 can't get a part of Y, abort. */
3039 if (ypart
== 0 && CONSTANT_P (y
))
3041 y
= force_const_mem (mode
, y
);
3042 ypart
= operand_subword (y
, i
, 1, mode
);
3044 else if (ypart
== 0)
3045 ypart
= operand_subword_force (y
, i
, mode
);
3047 if (xpart
== 0 || ypart
== 0)
3050 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3052 last_insn
= emit_move_insn (xpart
, ypart
);
3055 seq
= gen_sequence ();
3058 /* Show the output dies here. This is necessary for SUBREGs
3059 of pseudos since we cannot track their lifetimes correctly;
3060 hard regs shouldn't appear here except as return values.
3061 We never want to emit such a clobber after reload. */
3063 && ! (reload_in_progress
|| reload_completed
)
3064 && need_clobber
!= 0)
3066 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
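/* Worked example (editor's addition, not in the original source): with
   SIZE == 16 and EXTRA == 4, a total of 20 bytes of stack space is
   allocated.  The returned rtx addresses the start of the 16-byte block
   proper; whether the 4 padding bytes end up at lower or higher addresses
   than that block is controlled by BELOW.  */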
3089 push_block (size
, extra
, below
)
3095 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3096 if (CONSTANT_P (size
))
3097 anti_adjust_stack (plus_constant (size
, extra
));
3098 else if (GET_CODE (size
) == REG
&& extra
== 0)
3099 anti_adjust_stack (size
);
3102 temp
= copy_to_mode_reg (Pmode
, size
);
3104 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3105 temp
, 0, OPTAB_LIB_WIDEN
);
3106 anti_adjust_stack (temp
);
3109 #ifndef STACK_GROWS_DOWNWARD
3115 temp
= virtual_outgoing_args_rtx
;
3116 if (extra
!= 0 && below
)
3117 temp
= plus_constant (temp
, extra
);
3121 if (GET_CODE (size
) == CONST_INT
)
3122 temp
= plus_constant (virtual_outgoing_args_rtx
,
3123 -INTVAL (size
) - (below
? 0 : extra
));
3124 else if (extra
!= 0 && !below
)
3125 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3126 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3128 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3129 negate_rtx (Pmode
, size
));
3132 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
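/* Note (editor's addition, not in the original source): for the pre-modify
   push codes the stack pointer already addresses the pushed data, so the
   plain stack_pointer_rtx case applies; only the post-modify codes need the
   size added back (POST_DEC) or subtracted (POST_INC) to recover the
   address of the data just pushed.  */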
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */
3160 emit_single_push_insn (mode
, x
, type
)
3162 enum machine_mode mode
;
3166 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3168 enum insn_code icode
;
3169 insn_operand_predicate_fn pred
;
3171 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
3174 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3175 if (icode
!= CODE_FOR_nothing
)
3177 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3178 && !((*pred
) (x
, mode
))))
3179 x
= force_reg (mode
, x
);
3180 emit_insn (GEN_FCN (icode
) (x
));
3183 if (GET_MODE_SIZE (mode
) == rounded_size
)
3184 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3187 #ifdef STACK_GROWS_DOWNWARD
3188 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3189 GEN_INT (-(HOST_WIDE_INT
)rounded_size
));
3191 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3192 GEN_INT (rounded_size
));
3194 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3197 dest
= gen_rtx_MEM (mode
, dest_addr
);
3201 set_mem_attributes (dest
, type
, 1);
3203 if (flag_optimize_sibling_calls
)
3204 /* Function incoming arguments may overlap with sibling call
3205 outgoing arguments and we cannot allow reordering of reads
3206 from function arguments with stores to outgoing arguments
3207 of sibling calls. */
3208 set_mem_alias_set (dest
, 0);
3210 emit_move_insn (dest
, x
);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
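/* Example of the PARTIAL convention (editor's addition, not in the original
   source): for a 5-word BLKmode argument with PARTIAL == 2 and REG set, the
   first 2 words are loaded into consecutive registers starting at REG at
   the end of this function, and only the remaining 3 words are copied to
   the stack (or, when REG_PARM_STACK_SPACE is nonzero, the stack copy
   starts after the space reserved for the register part).  */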
3247 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
3248 args_addr
, args_so_far
, reg_parm_stack_space
,
3251 enum machine_mode mode
;
3260 int reg_parm_stack_space
;
3264 enum direction stack_direction
3265 #ifdef STACK_GROWS_DOWNWARD
3271 /* Decide where to pad the argument: `downward' for below,
3272 `upward' for above, or `none' for don't pad it.
3273 Default is below for small data on big-endian machines; else above. */
3274 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3276 /* Invert direction if stack is post-decrement.
3278 if (STACK_PUSH_CODE
== POST_DEC
)
3279 if (where_pad
!= none
)
3280 where_pad
= (where_pad
== downward
? upward
: downward
);
3282 xinner
= x
= protect_from_queue (x
, 0);
3284 if (mode
== BLKmode
)
3286 /* Copy a block into the stack, entirely or partially. */
3289 int used
= partial
* UNITS_PER_WORD
;
3290 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3298 /* USED is now the # of bytes we need not copy to the stack
3299 because registers will take care of them. */
3302 xinner
= adjust_address (xinner
, BLKmode
, used
);
3304 /* If the partial register-part of the arg counts in its stack size,
3305 skip the part of stack space corresponding to the registers.
3306 Otherwise, start copying to the beginning of the stack space,
3307 by setting SKIP to 0. */
3308 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3310 #ifdef PUSH_ROUNDING
3311 /* Do it with several push insns if that doesn't take lots of insns
3312 and if there is no difficulty with push insns that skip bytes
3313 on the stack for alignment purposes. */
3316 && GET_CODE (size
) == CONST_INT
3318 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3319 /* Here we avoid the case of a structure whose weak alignment
3320 forces many pushes of a small amount of data,
3321 and such small pushes do rounding that causes trouble. */
3322 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3323 || align
>= BIGGEST_ALIGNMENT
3324 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3325 == (align
/ BITS_PER_UNIT
)))
3326 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3328 /* Push padding now if padding above and stack grows down,
3329 or if padding below and stack grows up.
3330 But if space already allocated, this has already been done. */
3331 if (extra
&& args_addr
== 0
3332 && where_pad
!= none
&& where_pad
!= stack_direction
)
3333 anti_adjust_stack (GEN_INT (extra
));
3335 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
);
3337 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3341 in_check_memory_usage
= 1;
3342 temp
= get_push_address (INTVAL (size
) - used
);
3343 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3344 emit_library_call (chkr_copy_bitmap_libfunc
,
3345 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, temp
,
3346 Pmode
, XEXP (xinner
, 0), Pmode
,
3347 GEN_INT (INTVAL (size
) - used
),
3348 TYPE_MODE (sizetype
));
3350 emit_library_call (chkr_set_right_libfunc
,
3351 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, temp
,
3352 Pmode
, GEN_INT (INTVAL (size
) - used
),
3353 TYPE_MODE (sizetype
),
3354 GEN_INT (MEMORY_USE_RW
),
3355 TYPE_MODE (integer_type_node
));
3356 in_check_memory_usage
= 0;
3360 #endif /* PUSH_ROUNDING */
3364 /* Otherwise make space on the stack and copy the data
3365 to the address of that space. */
3367 /* Deduct words put into registers from the size we must copy. */
3370 if (GET_CODE (size
) == CONST_INT
)
3371 size
= GEN_INT (INTVAL (size
) - used
);
3373 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3374 GEN_INT (used
), NULL_RTX
, 0,
3378 /* Get the address of the stack space.
3379 In this case, we do not deal with EXTRA separately.
3380 A single stack adjust will do. */
3383 temp
= push_block (size
, extra
, where_pad
== downward
);
3386 else if (GET_CODE (args_so_far
) == CONST_INT
)
3387 temp
= memory_address (BLKmode
,
3388 plus_constant (args_addr
,
3389 skip
+ INTVAL (args_so_far
)));
3391 temp
= memory_address (BLKmode
,
3392 plus_constant (gen_rtx_PLUS (Pmode
,
3396 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3398 in_check_memory_usage
= 1;
3399 target
= copy_to_reg (temp
);
3400 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3401 emit_library_call (chkr_copy_bitmap_libfunc
,
3402 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
3404 XEXP (xinner
, 0), Pmode
,
3405 size
, TYPE_MODE (sizetype
));
3407 emit_library_call (chkr_set_right_libfunc
,
3408 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
3410 size
, TYPE_MODE (sizetype
),
3411 GEN_INT (MEMORY_USE_RW
),
3412 TYPE_MODE (integer_type_node
));
3413 in_check_memory_usage
= 0;
3416 target
= gen_rtx_MEM (BLKmode
, temp
);
3420 set_mem_attributes (target
, type
, 1);
3421 /* Function incoming arguments may overlap with sibling call
3422 outgoing arguments and we cannot allow reordering of reads
3423 from function arguments with stores to outgoing arguments
3424 of sibling calls. */
3425 set_mem_alias_set (target
, 0);
3428 set_mem_align (target
, align
);
3430 /* TEMP is the address of the block. Copy the data there. */
3431 if (GET_CODE (size
) == CONST_INT
3432 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size
), align
))
3434 move_by_pieces (target
, xinner
, INTVAL (size
), align
);
3439 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
3440 enum machine_mode mode
;
3442 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3444 mode
= GET_MODE_WIDER_MODE (mode
))
3446 enum insn_code code
= movstr_optab
[(int) mode
];
3447 insn_operand_predicate_fn pred
;
3449 if (code
!= CODE_FOR_nothing
3450 && ((GET_CODE (size
) == CONST_INT
3451 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3452 <= (GET_MODE_MASK (mode
) >> 1)))
3453 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
3454 && (!(pred
= insn_data
[(int) code
].operand
[0].predicate
)
3455 || ((*pred
) (target
, BLKmode
)))
3456 && (!(pred
= insn_data
[(int) code
].operand
[1].predicate
)
3457 || ((*pred
) (xinner
, BLKmode
)))
3458 && (!(pred
= insn_data
[(int) code
].operand
[3].predicate
)
3459 || ((*pred
) (opalign
, VOIDmode
))))
3461 rtx op2
= convert_to_mode (mode
, size
, 1);
3462 rtx last
= get_last_insn ();
3465 pred
= insn_data
[(int) code
].operand
[2].predicate
;
3466 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
3467 op2
= copy_to_mode_reg (mode
, op2
);
3469 pat
= GEN_FCN ((int) code
) (target
, xinner
,
3477 delete_insns_since (last
);
3482 if (!ACCUMULATE_OUTGOING_ARGS
)
3484 /* If the source is referenced relative to the stack pointer,
3485 copy it to another register to stabilize it. We do not need
3486 to do this if we know that we won't be changing sp. */
3488 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3489 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3490 temp
= copy_to_reg (temp
);
3493 /* Make inhibit_defer_pop nonzero around the library call
3494 to force it to pop the bcopy-arguments right away. */
3496 #ifdef TARGET_MEM_FUNCTIONS
3497 emit_library_call (memcpy_libfunc
, LCT_NORMAL
,
3498 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
3499 convert_to_mode (TYPE_MODE (sizetype
),
3500 size
, TREE_UNSIGNED (sizetype
)),
3501 TYPE_MODE (sizetype
));
3503 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3504 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
3505 convert_to_mode (TYPE_MODE (integer_type_node
),
3507 TREE_UNSIGNED (integer_type_node
)),
3508 TYPE_MODE (integer_type_node
));
3513 else if (partial
> 0)
3515 /* Scalar partly in registers. */
3517 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3520 /* # words of start of argument
3521 that we must make space for but need not store. */
3522 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3523 int args_offset
= INTVAL (args_so_far
);
3526 /* Push padding now if padding above and stack grows down,
3527 or if padding below and stack grows up.
3528 But if space already allocated, this has already been done. */
3529 if (extra
&& args_addr
== 0
3530 && where_pad
!= none
&& where_pad
!= stack_direction
)
3531 anti_adjust_stack (GEN_INT (extra
));
3533 /* If we make space by pushing it, we might as well push
3534 the real data. Otherwise, we can leave OFFSET nonzero
3535 and leave the space uninitialized. */
3539 /* Now NOT_STACK gets the number of words that we don't need to
3540 allocate on the stack. */
3541 not_stack
= partial
- offset
;
3543 /* If the partial register-part of the arg counts in its stack size,
3544 skip the part of stack space corresponding to the registers.
3545 Otherwise, start copying to the beginning of the stack space,
3546 by setting SKIP to 0. */
3547 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3549 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3550 x
= validize_mem (force_const_mem (mode
, x
));
3552 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3553 SUBREGs of such registers are not allowed. */
3554 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3555 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3556 x
= copy_to_reg (x
);
3558 /* Loop over all the words allocated on the stack for this arg. */
3559 /* We can do it by words, because any scalar bigger than a word
3560 has a size a multiple of a word. */
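      /* Illustrative note (editor's addition, not in the original source):
         a DImode scalar on a 32-bit target occupies size == 2 words here;
         with not_stack == 0 each word is pushed by a separate recursive
         emit_push_insn call below, in reverse order when PUSH_ARGS_REVERSED
         is defined so that the low-addressed word ends up where the ABI
         expects it.  */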
3561 #ifndef PUSH_ARGS_REVERSED
3562 for (i
= not_stack
; i
< size
; i
++)
3564 for (i
= size
- 1; i
>= not_stack
; i
--)
3566 if (i
>= not_stack
+ offset
)
3567 emit_push_insn (operand_subword_force (x
, i
, mode
),
3568 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3570 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3572 reg_parm_stack_space
, alignment_pad
);
3577 rtx target
= NULL_RTX
;
3580 /* Push padding now if padding above and stack grows down,
3581 or if padding below and stack grows up.
3582 But if space already allocated, this has already been done. */
3583 if (extra
&& args_addr
== 0
3584 && where_pad
!= none
&& where_pad
!= stack_direction
)
3585 anti_adjust_stack (GEN_INT (extra
));
3587 #ifdef PUSH_ROUNDING
3588 if (args_addr
== 0 && PUSH_ARGS
)
3589 emit_single_push_insn (mode
, x
, type
);
3593 if (GET_CODE (args_so_far
) == CONST_INT
)
3595 = memory_address (mode
,
3596 plus_constant (args_addr
,
3597 INTVAL (args_so_far
)));
3599 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3602 dest
= gen_rtx_MEM (mode
, addr
);
3605 set_mem_attributes (dest
, type
, 1);
3606 /* Function incoming arguments may overlap with sibling call
3607 outgoing arguments and we cannot allow reordering of reads
3608 from function arguments with stores to outgoing arguments
3609 of sibling calls. */
3610 set_mem_alias_set (dest
, 0);
3613 emit_move_insn (dest
, x
);
3617 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3619 in_check_memory_usage
= 1;
3621 target
= get_push_address (GET_MODE_SIZE (mode
));
3623 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3624 emit_library_call (chkr_copy_bitmap_libfunc
,
3625 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, target
,
3626 Pmode
, XEXP (x
, 0), Pmode
,
3627 GEN_INT (GET_MODE_SIZE (mode
)),
3628 TYPE_MODE (sizetype
));
3630 emit_library_call (chkr_set_right_libfunc
,
3631 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, target
,
3632 Pmode
, GEN_INT (GET_MODE_SIZE (mode
)),
3633 TYPE_MODE (sizetype
),
3634 GEN_INT (MEMORY_USE_RW
),
3635 TYPE_MODE (integer_type_node
));
3636 in_check_memory_usage
= 0;
3641 /* If part should go in registers, copy that part
3642 into the appropriate registers. Do this now, at the end,
3643 since mem-to-mem copies above may do function calls. */
3644 if (partial
> 0 && reg
!= 0)
3646 /* Handle calls that pass values in multiple non-contiguous locations.
3647 The Irix 6 ABI has examples of this. */
3648 if (GET_CODE (reg
) == PARALLEL
)
3649 emit_group_load (reg
, x
, -1); /* ??? size? */
3651 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3654 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3655 anti_adjust_stack (GEN_INT (extra
));
3657 if (alignment_pad
&& args_addr
== 0)
3658 anti_adjust_stack (alignment_pad
);
3661 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3669 /* Only registers can be subtargets. */
3670 || GET_CODE (x
) != REG
3671 /* If the register is readonly, it can't be set more than once. */
3672 || RTX_UNCHANGING_P (x
)
3673 /* Don't use hard regs to avoid extending their life. */
3674 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3675 /* Avoid subtargets inside loops,
3676 since they hide some invariant expressions. */
3677 || preserve_subexpressions_p ())
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */
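/* For example (editor's addition, not in the original source), for a C
   assignment such as

     s.f = x;

   the COMPONENT_REF case below computes the bit position and mode of the
   field F with get_inner_reference and then stores through store_field,
   rather than expanding S.F to a simple MEM.  */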
3693 expand_assignment (to
, from
, want_value
, suggest_reg
)
3696 int suggest_reg ATTRIBUTE_UNUSED
;
3701 /* Don't crash if the lhs of the assignment was erroneous. */
3703 if (TREE_CODE (to
) == ERROR_MARK
)
3705 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3706 return want_value
? result
: NULL_RTX
;
3709 /* Assignment of a structure component needs special treatment
3710 if the structure component's rtx is not simply a MEM.
3711 Assignment of an array element at a constant index, and assignment of
3712 an array element in an unaligned packed structure field, has the same
3715 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3716 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
)
3718 enum machine_mode mode1
;
3719 HOST_WIDE_INT bitsize
, bitpos
;
3727 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3728 &unsignedp
, &volatilep
);
3730 /* If we are going to use store_bit_field and extract_bit_field,
3731 make sure to_rtx will be safe for multiple use. */
3733 if (mode1
== VOIDmode
&& want_value
)
3734 tem
= stabilize_reference (tem
);
3736 orig_to_rtx
= to_rtx
3737 = expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_DONT
);
3740 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3742 if (GET_CODE (to_rtx
) != MEM
)
3745 if (GET_MODE (offset_rtx
) != ptr_mode
)
3746 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3748 #ifdef POINTERS_EXTEND_UNSIGNED
3749 if (GET_MODE (offset_rtx
) != Pmode
)
3750 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
3753 /* A constant address in TO_RTX can have VOIDmode, we must not try
3754 to call force_reg for that case. Avoid that case. */
3755 if (GET_CODE (to_rtx
) == MEM
3756 && GET_MODE (to_rtx
) == BLKmode
3757 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3759 && (bitpos
% bitsize
) == 0
3760 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3761 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3764 = adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3766 if (GET_CODE (XEXP (temp
, 0)) == REG
)
3769 to_rtx
= (replace_equiv_address
3770 (to_rtx
, force_reg (GET_MODE (XEXP (temp
, 0)),
3775 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3776 highest_pow2_factor (offset
));
3780 /* Deal with volatile and readonly fields. The former is only done
3781 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3782 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
3784 if (to_rtx
== orig_to_rtx
)
3785 to_rtx
= copy_rtx (to_rtx
);
3786 MEM_VOLATILE_P (to_rtx
) = 1;
3789 if (TREE_CODE (to
) == COMPONENT_REF
3790 && TREE_READONLY (TREE_OPERAND (to
, 1)))
3792 if (to_rtx
== orig_to_rtx
)
3793 to_rtx
= copy_rtx (to_rtx
);
3794 RTX_UNCHANGING_P (to_rtx
) = 1;
3797 if (! can_address_p (to
))
3799 if (to_rtx
== orig_to_rtx
)
3800 to_rtx
= copy_rtx (to_rtx
);
3801 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3804 /* Check the access. */
3805 if (current_function_check_memory_usage
&& GET_CODE (to_rtx
) == MEM
)
3810 enum machine_mode best_mode
;
3812 best_mode
= get_best_mode (bitsize
, bitpos
,
3813 TYPE_ALIGN (TREE_TYPE (tem
)),
3815 if (best_mode
== VOIDmode
)
3818 best_mode_size
= GET_MODE_BITSIZE (best_mode
);
3819 to_addr
= plus_constant (XEXP (to_rtx
, 0), bitpos
/ BITS_PER_UNIT
);
3820 size
= CEIL ((bitpos
% best_mode_size
) + bitsize
, best_mode_size
);
3821 size
*= GET_MODE_SIZE (best_mode
);
3823 /* Check the access right of the pointer. */
3824 in_check_memory_usage
= 1;
3826 emit_library_call (chkr_check_addr_libfunc
, LCT_CONST_MAKE_BLOCK
,
3827 VOIDmode
, 3, to_addr
, Pmode
,
3828 GEN_INT (size
), TYPE_MODE (sizetype
),
3829 GEN_INT (MEMORY_USE_WO
),
3830 TYPE_MODE (integer_type_node
));
3831 in_check_memory_usage
= 0;
3834 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3836 /* Spurious cast for HPUX compiler. */
3837 ? ((enum machine_mode
)
3838 TYPE_MODE (TREE_TYPE (to
)))
3840 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
3842 preserve_temp_slots (result
);
3846 /* If the value is meaningful, convert RESULT to the proper mode.
3847 Otherwise, return nothing. */
3848 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3849 TYPE_MODE (TREE_TYPE (from
)),
3851 TREE_UNSIGNED (TREE_TYPE (to
)))
3855 /* If the rhs is a function call and its value is not an aggregate,
3856 call the function before we start to compute the lhs.
3857 This is needed for correct code for cases such as
3858 val = setjmp (buf) on machines where reference to val
3859 requires loading up part of an address in a separate insn.
3861 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3862 since it might be a promoted variable where the zero- or sign- extension
3863 needs to be done. Handling this in the normal way is safe because no
3864 computation is done before the call. */
3865 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
3866 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3867 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3868 && GET_CODE (DECL_RTL (to
)) == REG
))
3873 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3875 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3877 /* Handle calls that return values in multiple non-contiguous locations.
3878 The Irix 6 ABI has examples of this. */
3879 if (GET_CODE (to_rtx
) == PARALLEL
)
3880 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)));
3881 else if (GET_MODE (to_rtx
) == BLKmode
)
3882 emit_block_move (to_rtx
, value
, expr_size (from
));
3885 #ifdef POINTERS_EXTEND_UNSIGNED
3886 if (POINTER_TYPE_P (TREE_TYPE (to
))
3887 && GET_MODE (to_rtx
) != GET_MODE (value
))
3888 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3890 emit_move_insn (to_rtx
, value
);
3892 preserve_temp_slots (to_rtx
);
3895 return want_value
? to_rtx
: NULL_RTX
;
3898 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3899 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3902 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3904 /* Don't move directly into a return register. */
3905 if (TREE_CODE (to
) == RESULT_DECL
3906 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3911 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3913 if (GET_CODE (to_rtx
) == PARALLEL
)
3914 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)));
3916 emit_move_insn (to_rtx
, temp
);
3918 preserve_temp_slots (to_rtx
);
3921 return want_value
? to_rtx
: NULL_RTX
;
3924 /* In case we are returning the contents of an object which overlaps
3925 the place the value is being stored, use a safe function when copying
3926 a value through a pointer into a structure value return block. */
3927 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3928 && current_function_returns_struct
3929 && !current_function_returns_pcc_struct
)
3934 size
= expr_size (from
);
3935 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
,
3936 EXPAND_MEMORY_USE_DONT
);
3938 /* Copy the rights of the bitmap. */
3939 if (current_function_check_memory_usage
)
3940 emit_library_call (chkr_copy_bitmap_libfunc
, LCT_CONST_MAKE_BLOCK
,
3941 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3942 XEXP (from_rtx
, 0), Pmode
,
3943 convert_to_mode (TYPE_MODE (sizetype
),
3944 size
, TREE_UNSIGNED (sizetype
)),
3945 TYPE_MODE (sizetype
));
3947 #ifdef TARGET_MEM_FUNCTIONS
3948 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3949 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3950 XEXP (from_rtx
, 0), Pmode
,
3951 convert_to_mode (TYPE_MODE (sizetype
),
3952 size
, TREE_UNSIGNED (sizetype
)),
3953 TYPE_MODE (sizetype
));
3955 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3956 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3957 XEXP (to_rtx
, 0), Pmode
,
3958 convert_to_mode (TYPE_MODE (integer_type_node
),
3959 size
, TREE_UNSIGNED (integer_type_node
)),
3960 TYPE_MODE (integer_type_node
));
3963 preserve_temp_slots (to_rtx
);
3966 return want_value
? to_rtx
: NULL_RTX
;
3969 /* Compute FROM and store the value in the rtx we got. */
3972 result
= store_expr (from
, to_rtx
, want_value
);
3973 preserve_temp_slots (result
);
3976 return want_value
? result
: NULL_RTX
;
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */
4003 store_expr (exp
, target
, want_value
)
4009 int dont_return_target
= 0;
4010 int dont_store_target
= 0;
4012 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4014 /* Perform first part of compound expression, then assign from second
4016 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
4018 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4020 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4022 /* For conditional expression, get safe form of the target. Then
4023 test the condition, doing the appropriate assignment on either
4024 side. This avoids the creation of unnecessary temporaries.
4025 For non-BLKmode, it is more efficient not to do this. */
4027 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4030 target
= protect_from_queue (target
, 1);
4032 do_pending_stack_adjust ();
4034 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4035 start_cleanup_deferral ();
4036 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
4037 end_cleanup_deferral ();
4039 emit_jump_insn (gen_jump (lab2
));
4042 start_cleanup_deferral ();
4043 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
4044 end_cleanup_deferral ();
4049 return want_value
? target
: NULL_RTX
;
4051 else if (queued_subexp_p (target
))
4052 /* If target contains a postincrement, let's not risk
4053 using it as the place to generate the rhs. */
4055 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4057 /* Expand EXP into a new pseudo. */
4058 temp
= gen_reg_rtx (GET_MODE (target
));
4059 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
4062 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
4064 /* If target is volatile, ANSI requires accessing the value
4065 *from* the target, if it is accessed. So make that happen.
4066 In no case return the target itself. */
4067 if (! MEM_VOLATILE_P (target
) && want_value
)
4068 dont_return_target
= 1;
4070 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
4071 && GET_MODE (target
) != BLKmode
)
4072 /* If target is in memory and caller wants value in a register instead,
4073 arrange that. Pass TARGET as target for expand_expr so that,
4074 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4075 We know expand_expr will not use the target in that case.
4076 Don't do this if TARGET is volatile because we are supposed
4077 to write it and then read it. */
4079 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4080 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4082 /* If TEMP is already in the desired TARGET, only copy it from
4083 memory and don't store it there again. */
4085 || (rtx_equal_p (temp
, target
)
4086 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4087 dont_store_target
= 1;
4088 temp
= copy_to_reg (temp
);
4090 dont_return_target
= 1;
4092 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4093 /* If this is an scalar in a register that is stored in a wider mode
4094 than the declared mode, compute the result into its declared mode
4095 and then convert to the wider mode. Our value is the computed
4098 /* If we don't want a value, we can do the conversion inside EXP,
4099 which will often result in some optimizations. Do the conversion
4100 in two steps: first change the signedness, if needed, then
4101 the extend. But don't do this if the type of EXP is a subtype
4102 of something else since then the conversion might involve
4103 more than just converting modes. */
4104 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4105 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4107 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4108 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4111 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
4115 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
4116 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4120 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4122 /* If TEMP is a volatile MEM and we want a result value, make
4123 the access now so it gets done only once. Likewise if
4124 it contains TARGET. */
4125 if (GET_CODE (temp
) == MEM
&& want_value
4126 && (MEM_VOLATILE_P (temp
)
4127 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
4128 temp
= copy_to_reg (temp
);
4130 /* If TEMP is a VOIDmode constant, use convert_modes to make
4131 sure that we properly convert it. */
4132 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4134 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4135 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4136 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4137 GET_MODE (target
), temp
,
4138 SUBREG_PROMOTED_UNSIGNED_P (target
));
4141 convert_move (SUBREG_REG (target
), temp
,
4142 SUBREG_PROMOTED_UNSIGNED_P (target
));
4144 /* If we promoted a constant, change the mode back down to match
4145 target. Otherwise, the caller might get confused by a result whose
4146 mode is larger than expected. */
4148 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
)
4149 && GET_MODE (temp
) != VOIDmode
)
4151 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4152 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4153 SUBREG_PROMOTED_UNSIGNED_P (temp
)
4154 = SUBREG_PROMOTED_UNSIGNED_P (target
);
4157 return want_value
? temp
: NULL_RTX
;
      temp = expand_expr (exp, target, GET_MODE (target), 0);

      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4187 if (current_function_check_memory_usage
4188 && GET_CODE (target
) == MEM
4189 && AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
4191 in_check_memory_usage
= 1;
4192 if (GET_CODE (temp
) == MEM
)
4193 emit_library_call (chkr_copy_bitmap_libfunc
, LCT_CONST_MAKE_BLOCK
,
4194 VOIDmode
, 3, XEXP (target
, 0), Pmode
,
4195 XEXP (temp
, 0), Pmode
,
4196 expr_size (exp
), TYPE_MODE (sizetype
));
4198 emit_library_call (chkr_check_addr_libfunc
, LCT_CONST_MAKE_BLOCK
,
4199 VOIDmode
, 3, XEXP (target
, 0), Pmode
,
4200 expr_size (exp
), TYPE_MODE (sizetype
),
4201 GEN_INT (MEMORY_USE_WO
),
4202 TYPE_MODE (integer_type_node
));
4203 in_check_memory_usage
= 0;
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */
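  /* A concrete illustration, not part of the original comment: for a C
     assignment such as `v = v' where V is a volatile int, expand_expr of
     the right-hand side can return a MEM that is rtx_equal_p to TARGET
     without being the same rtx; the copy emitted below is what preserves
     the required read of the volatile object followed by a write.  */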
  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size);
4259 /* Compute the size of the data to copy from the string. */
4261 = size_binop (MIN_EXPR
,
4262 make_tree (sizetype
, size
),
4263 size_int (TREE_STRING_LENGTH (exp
)));
4264 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4284 addr
= force_reg (ptr_mode
, addr
);
4285 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
4286 copy_size_rtx
, NULL_RTX
, 0,
4289 size
= expand_binop (ptr_mode
, sub_optab
, size
,
4290 copy_size_rtx
, NULL_RTX
, 0,
4293 label
= gen_label_rtx ();
4294 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4295 GET_MODE (size
), 0, label
);
4298 if (size
!= const0_rtx
)
4300 rtx dest
= gen_rtx_MEM (BLKmode
, addr
);
4302 MEM_COPY_ATTRIBUTES (dest
, target
);
4304 /* Be sure we can write on ADDR. */
4305 in_check_memory_usage
= 1;
4306 if (current_function_check_memory_usage
)
4307 emit_library_call (chkr_check_addr_libfunc
,
4308 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
4310 size
, TYPE_MODE (sizetype
),
4311 GEN_INT (MEMORY_USE_WO
),
4312 TYPE_MODE (integer_type_node
));
4313 in_check_memory_usage
= 0;
4314 clear_storage (dest
, size
);
4321 /* Handle calls that return values in multiple non-contiguous locations.
4322 The Irix 6 ABI has examples of this. */
4323 else if (GET_CODE (target
) == PARALLEL
)
4324 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)));
4325 else if (GET_MODE (temp
) == BLKmode
)
4326 emit_block_move (target
, temp
, expr_size (exp
));
4328 emit_move_insn (target
, temp
);
4331 /* If we don't want a value, return NULL_RTX. */
4335 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4336 ??? The latter test doesn't seem to make sense. */
4337 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4340 /* Return TARGET itself if it is a hard register. */
4341 else if (want_value
&& GET_MODE (target
) != BLKmode
4342 && ! (GET_CODE (target
) == REG
4343 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4344 return copy_to_reg (target
);
4350 /* Return 1 if EXP just contains zeros. */
4358 switch (TREE_CODE (exp
))
4362 case NON_LVALUE_EXPR
:
4363 case VIEW_CONVERT_EXPR
:
4364 return is_zeros_p (TREE_OPERAND (exp
, 0));
4367 return integer_zerop (exp
);
4371 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4374 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
4377 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4378 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4379 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4380 if (! is_zeros_p (TREE_VALUE (elt
)))
4390 /* Return 1 if EXP contains mostly (3/4) zeros. */
4393 mostly_zeros_p (exp
)
4396 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4398 int elts
= 0, zeros
= 0;
4399 tree elt
= CONSTRUCTOR_ELTS (exp
);
4400 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4402 /* If there are no ranges of true bits, it is all zero. */
4403 return elt
== NULL_TREE
;
4405 for (; elt
; elt
= TREE_CHAIN (elt
))
4407 /* We do not handle the case where the index is a RANGE_EXPR,
4408 so the statistic will be somewhat inaccurate.
4409 We do make a more accurate count in store_constructor itself,
4410 so since this function is only used for nested array elements,
4411 this should be close enough. */
4412 if (mostly_zeros_p (TREE_VALUE (elt
)))
4417 return 4 * zeros
>= 3 * elts
;
4420 return is_zeros_p (exp
);
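/* For illustration only: with the 3/4 test above, a CONSTRUCTOR whose
   elements include 7 mostly-zero values out of 9 satisfies
   4 * 7 >= 3 * 9 (28 >= 27) and is treated as mostly zero, while
   6 out of 9 (24 < 27) is not.  */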
4423 /* Helper function for store_constructor.
4424 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4425 TYPE is the type of the CONSTRUCTOR, not the element type.
4426 CLEARED is as for store_constructor.
4427 ALIAS_SET is the alias set to use for any stores.
4429 This provides a recursive shortcut back to store_constructor when it isn't
4430 necessary to go through store_field. This is so that we can pass through
4431 the cleared field to let store_constructor know that we may not have to
4432 clear a substructure if the outer structure has already been cleared. */
4435 store_constructor_field (target
, bitsize
, bitpos
, mode
, exp
, type
, cleared
,
4438 unsigned HOST_WIDE_INT bitsize
;
4439 HOST_WIDE_INT bitpos
;
4440 enum machine_mode mode
;
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyway.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
4452 if (GET_CODE (target
) == MEM
)
4454 = adjust_address (target
,
4455 GET_MODE (target
) == BLKmode
4457 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4458 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
      /* Update the alias set, if required.  */
      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4472 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
4476 /* Store the value of constructor EXP into the rtx TARGET.
4477 TARGET is either a REG or a MEM; we know it cannot conflict, since
4478 safe_from_p has been called.
4479 CLEARED is true if TARGET is known to have been zero'd.
4480 SIZE is the number of bytes of TARGET we are allowed to modify: this
4481 may not be the same as the size of EXP if we are assigning to a field
4482 which has been packed to exclude padding bits. */
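/* In rough outline (a reading of the code below, not a change to its
   behavior): for RECORD/UNION types we first decide whether TARGET must be
   cleared or marked dead, then store each nonzero element of
   CONSTRUCTOR_ELTS through store_constructor_field; ARRAY_TYPE and
   SET_TYPE constructors get analogous treatment with their own clearing
   heuristics.  */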
4485 store_constructor (exp
, target
, cleared
, size
)
4491 tree type
= TREE_TYPE (exp
);
4492 #ifdef WORD_REGISTER_OPERATIONS
4493 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4496 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4497 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4501 /* We either clear the aggregate or indicate the value is dead. */
4502 if ((TREE_CODE (type
) == UNION_TYPE
4503 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4505 && ! CONSTRUCTOR_ELTS (exp
))
4506 /* If the constructor is empty, clear the union. */
4508 clear_storage (target
, expr_size (exp
));
4512 /* If we are building a static constructor into a register,
4513 set the initial value as zero so we can fold the value into
4514 a constant. But if more than one register is involved,
4515 this probably loses. */
4516 else if (! cleared
&& GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4517 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4519 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4523 /* If the constructor has fewer fields than the structure
4524 or if we are initializing the structure to mostly zeros,
4525 clear the whole structure first. Don't do this if TARGET is a
4526 register whose mode size isn't equal to SIZE since clear_storage
4527 can't handle this case. */
4528 else if (! cleared
&& size
> 0
4529 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4530 != fields_length (type
))
4531 || mostly_zeros_p (exp
))
4532 && (GET_CODE (target
) != REG
4533 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4536 clear_storage (target
, GEN_INT (size
));
4541 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4543 /* Store each element of the constructor into
4544 the corresponding field of TARGET. */
4546 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4548 tree field
= TREE_PURPOSE (elt
);
4549 #ifdef WORD_REGISTER_OPERATIONS
4550 tree value
= TREE_VALUE (elt
);
4552 enum machine_mode mode
;
4553 HOST_WIDE_INT bitsize
;
4554 HOST_WIDE_INT bitpos
= 0;
4557 rtx to_rtx
= target
;
4559 /* Just ignore missing fields.
4560 We cleared the whole structure, above,
4561 if any fields are missing. */
4565 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
4568 if (host_integerp (DECL_SIZE (field
), 1))
4569 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4573 unsignedp
= TREE_UNSIGNED (field
);
4574 mode
= DECL_MODE (field
);
4575 if (DECL_BIT_FIELD (field
))
4578 offset
= DECL_FIELD_OFFSET (field
);
4579 if (host_integerp (offset
, 0)
4580 && host_integerp (bit_position (field
), 0))
4582 bitpos
= int_bit_position (field
);
4586 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4592 if (contains_placeholder_p (offset
))
4593 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4594 offset
, make_tree (TREE_TYPE (exp
), target
));
4596 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4597 if (GET_CODE (to_rtx
) != MEM
)
4600 if (GET_MODE (offset_rtx
) != ptr_mode
)
4601 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4603 #ifdef POINTERS_EXTEND_UNSIGNED
4604 if (GET_MODE (offset_rtx
) != Pmode
)
4605 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4608 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4609 highest_pow2_factor (offset
));
4612 if (TREE_READONLY (field
))
4614 if (GET_CODE (to_rtx
) == MEM
)
4615 to_rtx
= copy_rtx (to_rtx
);
4617 RTX_UNCHANGING_P (to_rtx
) = 1;
4620 #ifdef WORD_REGISTER_OPERATIONS
4621 /* If this initializes a field that is smaller than a word, at the
4622 start of a word, try to widen it to a full word.
4623 This special case allows us to output C++ member function
4624 initializations in a form that the optimizers can understand. */
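	  /* For illustration, assuming a 32-bit word and an 8-bit field
	     (numbers not from the source): storing an 8-bit INTEGER_CST at
	     bitpos 0 of a register target first converts the value to a
	     BITS_PER_WORD-wide type and, on a big-endian target, shifts it
	     left by BITS_PER_WORD - bitsize = 24 so the significant bits
	     land at the top of the word; bitsize then becomes
	     BITS_PER_WORD, as the code below shows.  */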
4625 if (GET_CODE (target
) == REG
4626 && bitsize
< BITS_PER_WORD
4627 && bitpos
% BITS_PER_WORD
== 0
4628 && GET_MODE_CLASS (mode
) == MODE_INT
4629 && TREE_CODE (value
) == INTEGER_CST
4631 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4633 tree type
= TREE_TYPE (value
);
4635 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4637 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4638 value
= convert (type
, value
);
4641 if (BYTES_BIG_ENDIAN
)
4643 = fold (build (LSHIFT_EXPR
, type
, value
,
4644 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4645 bitsize
= BITS_PER_WORD
;
4650 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4651 && DECL_NONADDRESSABLE_P (field
))
4653 to_rtx
= copy_rtx (to_rtx
);
4654 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4657 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4658 TREE_VALUE (elt
), type
, cleared
,
4659 get_alias_set (TREE_TYPE (field
)));
4662 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4667 tree domain
= TYPE_DOMAIN (type
);
4668 tree elttype
= TREE_TYPE (type
);
4669 int const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4670 && TYPE_MAX_VALUE (domain
)
4671 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4672 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4673 HOST_WIDE_INT minelt
= 0;
4674 HOST_WIDE_INT maxelt
= 0;
4676 /* If we have constant bounds for the range of the type, get them. */
4679 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4680 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
4686 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4690 HOST_WIDE_INT count
= 0, zero_count
= 0;
4691 need_to_clear
= ! const_bounds_p
;
4693 /* This loop is a more accurate version of the loop in
4694 mostly_zeros_p (it handles RANGE_EXPR in an index).
4695 It is also needed to check for missing elements. */
4696 for (elt
= CONSTRUCTOR_ELTS (exp
);
4697 elt
!= NULL_TREE
&& ! need_to_clear
;
4698 elt
= TREE_CHAIN (elt
))
4700 tree index
= TREE_PURPOSE (elt
);
4701 HOST_WIDE_INT this_node_count
;
4703 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4705 tree lo_index
= TREE_OPERAND (index
, 0);
4706 tree hi_index
= TREE_OPERAND (index
, 1);
4708 if (! host_integerp (lo_index
, 1)
4709 || ! host_integerp (hi_index
, 1))
4715 this_node_count
= (tree_low_cst (hi_index
, 1)
4716 - tree_low_cst (lo_index
, 1) + 1);
4719 this_node_count
= 1;
4721 count
+= this_node_count
;
4722 if (mostly_zeros_p (TREE_VALUE (elt
)))
4723 zero_count
+= this_node_count
;
4726 /* Clear the entire array first if there are any missing elements,
4727 or if the incidence of zero elements is >= 75%. */
4729 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4733 if (need_to_clear
&& size
> 0)
4736 clear_storage (target
, GEN_INT (size
));
4739 else if (REG_P (target
))
4740 /* Inform later passes that the old value is dead. */
4741 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4743 /* Store each element of the constructor into
4744 the corresponding element of TARGET, determined
4745 by counting the elements. */
4746 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4748 elt
= TREE_CHAIN (elt
), i
++)
4750 enum machine_mode mode
;
4751 HOST_WIDE_INT bitsize
;
4752 HOST_WIDE_INT bitpos
;
4754 tree value
= TREE_VALUE (elt
);
4755 tree index
= TREE_PURPOSE (elt
);
4756 rtx xtarget
= target
;
4758 if (cleared
&& is_zeros_p (value
))
4761 unsignedp
= TREE_UNSIGNED (elttype
);
4762 mode
= TYPE_MODE (elttype
);
4763 if (mode
== BLKmode
)
4764 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4765 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4768 bitsize
= GET_MODE_BITSIZE (mode
);
4770 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4772 tree lo_index
= TREE_OPERAND (index
, 0);
4773 tree hi_index
= TREE_OPERAND (index
, 1);
4774 rtx index_r
, pos_rtx
, hi_r
, loop_top
, loop_end
;
4775 struct nesting
*loop
;
4776 HOST_WIDE_INT lo
, hi
, count
;
4779 /* If the range is constant and "small", unroll the loop. */
4781 && host_integerp (lo_index
, 0)
4782 && host_integerp (hi_index
, 0)
4783 && (lo
= tree_low_cst (lo_index
, 0),
4784 hi
= tree_low_cst (hi_index
, 0),
4785 count
= hi
- lo
+ 1,
4786 (GET_CODE (target
) != MEM
4788 || (host_integerp (TYPE_SIZE (elttype
), 1)
4789 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4792 lo
-= minelt
; hi
-= minelt
;
4793 for (; lo
<= hi
; lo
++)
4795 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4797 if (GET_CODE (target
) == MEM
4798 && !MEM_KEEP_ALIAS_SET_P (target
)
4799 && TYPE_NONALIASED_COMPONENT (type
))
4801 target
= copy_rtx (target
);
4802 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4805 store_constructor_field
4806 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4807 get_alias_set (elttype
));
4812 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4813 loop_top
= gen_label_rtx ();
4814 loop_end
= gen_label_rtx ();
4816 unsignedp
= TREE_UNSIGNED (domain
);
4818 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4821 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4823 SET_DECL_RTL (index
, index_r
);
4824 if (TREE_CODE (value
) == SAVE_EXPR
4825 && SAVE_EXPR_RTL (value
) == 0)
		  /* Make sure value gets expanded once before the
		     loop.  */
		  expand_expr (value, const0_rtx, VOIDmode, 0);
4832 store_expr (lo_index
, index_r
, 0);
4833 loop
= expand_start_loop (0);
4835 /* Assign value to element index. */
4837 = convert (ssizetype
,
4838 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4839 index
, TYPE_MIN_VALUE (domain
))));
4840 position
= size_binop (MULT_EXPR
, position
,
4842 TYPE_SIZE_UNIT (elttype
)));
4844 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4845 xtarget
= offset_address (target
, pos_rtx
,
4846 highest_pow2_factor (position
));
4847 xtarget
= adjust_address (xtarget
, mode
, 0);
4848 if (TREE_CODE (value
) == CONSTRUCTOR
)
4849 store_constructor (value
, xtarget
, cleared
,
4850 bitsize
/ BITS_PER_UNIT
);
4852 store_expr (value
, xtarget
, 0);
4854 expand_exit_loop_if_false (loop
,
4855 build (LT_EXPR
, integer_type_node
,
4858 expand_increment (build (PREINCREMENT_EXPR
,
4860 index
, integer_one_node
), 0, 0);
4862 emit_label (loop_end
);
4865 else if ((index
!= 0 && ! host_integerp (index
, 0))
4866 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4871 index
= ssize_int (1);
4874 index
= convert (ssizetype
,
4875 fold (build (MINUS_EXPR
, index
,
4876 TYPE_MIN_VALUE (domain
))));
4878 position
= size_binop (MULT_EXPR
, index
,
4880 TYPE_SIZE_UNIT (elttype
)));
4881 xtarget
= offset_address (target
,
4882 expand_expr (position
, 0, VOIDmode
, 0),
4883 highest_pow2_factor (position
));
4884 xtarget
= adjust_address (xtarget
, mode
, 0);
4885 store_expr (value
, xtarget
, 0);
4890 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4891 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4893 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4895 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
4896 && TYPE_NONALIASED_COMPONENT (type
))
4898 target
= copy_rtx (target
);
4899 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4902 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4903 type
, cleared
, get_alias_set (elttype
));
4909 /* Set constructor assignments. */
4910 else if (TREE_CODE (type
) == SET_TYPE
)
4912 tree elt
= CONSTRUCTOR_ELTS (exp
);
4913 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4914 tree domain
= TYPE_DOMAIN (type
);
4915 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and then
	 set just the bits we want.  */
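      /* A sketch of the constant-part initialization that follows (a
	 summary of the code below, not new behavior): the member bits
	 reported by get_set_constructor_bits are packed into a
	 HOST_WIDE_INT `word', big-endian targets filling from the most
	 significant position (1 << (set_word_size - 1 - bit_pos)); each
	 completed word is then written out with emit_move_insn.  */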
4927 /* Check for all zeros. */
4928 if (elt
== NULL_TREE
&& size
> 0)
4931 clear_storage (target
, GEN_INT (size
));
4935 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4936 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4937 bitlength
= size_binop (PLUS_EXPR
,
4938 size_diffop (domain_max
, domain_min
),
4941 nbits
= tree_low_cst (bitlength
, 1);
4943 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4944 are "complicated" (more than one range), initialize (the
4945 constant parts) by copying from a constant. */
4946 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4947 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4949 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4950 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4951 char *bit_buffer
= (char *) alloca (nbits
);
4952 HOST_WIDE_INT word
= 0;
4953 unsigned int bit_pos
= 0;
4954 unsigned int ibit
= 0;
4955 unsigned int offset
= 0; /* In bytes from beginning of set. */
4957 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4960 if (bit_buffer
[ibit
])
4962 if (BYTES_BIG_ENDIAN
)
4963 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4965 word
|= 1 << bit_pos
;
4969 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4971 if (word
!= 0 || ! cleared
)
4973 rtx datum
= GEN_INT (word
);
4976 /* The assumption here is that it is safe to use
4977 XEXP if the set is multi-word, but not if
4978 it's single-word. */
4979 if (GET_CODE (target
) == MEM
)
4980 to_rtx
= adjust_address (target
, mode
, offset
);
4981 else if (offset
== 0)
4985 emit_move_insn (to_rtx
, datum
);
4992 offset
+= set_word_size
/ BITS_PER_UNIT
;
4997 /* Don't bother clearing storage if the set is all ones. */
4998 if (TREE_CHAIN (elt
) != NULL_TREE
4999 || (TREE_PURPOSE (elt
) == NULL_TREE
5001 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5002 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5003 || (tree_low_cst (TREE_VALUE (elt
), 0)
5004 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5005 != (HOST_WIDE_INT
) nbits
))))
5006 clear_storage (target
, expr_size (exp
));
5008 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5010 /* Start of range of element or NULL. */
5011 tree startbit
= TREE_PURPOSE (elt
);
5012 /* End of range of element, or element value. */
5013 tree endbit
= TREE_VALUE (elt
);
5014 #ifdef TARGET_MEM_FUNCTIONS
5015 HOST_WIDE_INT startb
, endb
;
5017 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5019 bitlength_rtx
= expand_expr (bitlength
,
5020 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5022 /* Handle non-range tuple element like [ expr ]. */
5023 if (startbit
== NULL_TREE
)
5025 startbit
= save_expr (endbit
);
5029 startbit
= convert (sizetype
, startbit
);
5030 endbit
= convert (sizetype
, endbit
);
5031 if (! integer_zerop (domain_min
))
5033 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5034 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5036 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5037 EXPAND_CONST_ADDRESS
);
5038 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5039 EXPAND_CONST_ADDRESS
);
5045 ((build_qualified_type (type_for_mode (GET_MODE (target
), 0),
5048 emit_move_insn (targetx
, target
);
5051 else if (GET_CODE (target
) == MEM
)
5056 #ifdef TARGET_MEM_FUNCTIONS
5057 /* Optimization: If startbit and endbit are
5058 constants divisible by BITS_PER_UNIT,
5059 call memset instead. */
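		  /* Worked example, assuming 8-bit BITS_PER_UNIT (numbers
		     not taken from the source): for the bit range [8, 31],
		     startb = 8 and endb = 31 + 1 = 32 are both
		     byte-aligned, so memset fills (32 - 8) / 8 = 3 bytes
		     starting at byte offset 8 / 8 = 1 with all-ones
		     (constm1_rtx).  */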
5060 if (TREE_CODE (startbit
) == INTEGER_CST
5061 && TREE_CODE (endbit
) == INTEGER_CST
5062 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5063 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5065 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5067 plus_constant (XEXP (targetx
, 0),
5068 startb
/ BITS_PER_UNIT
),
5070 constm1_rtx
, TYPE_MODE (integer_type_node
),
5071 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5072 TYPE_MODE (sizetype
));
5076 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
5077 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
5078 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5079 startbit_rtx
, TYPE_MODE (sizetype
),
5080 endbit_rtx
, TYPE_MODE (sizetype
));
5083 emit_move_insn (target
, targetx
);
5091 /* Store the value of EXP (an expression tree)
5092 into a subfield of TARGET which has mode MODE and occupies
5093 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5094 If MODE is VOIDmode, it means that we are storing into a bit-field.
5096 If VALUE_MODE is VOIDmode, return nothing in particular.
5097 UNSIGNEDP is not used in this case.
5099 Otherwise, return an rtx for the value stored. This rtx
5100 has mode VALUE_MODE if that is convenient to do.
5101 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5103 TYPE is the type of the underlying object,
5105 ALIAS_SET is the alias set for the destination. This value will
5106 (in general) be different from that for TARGET, since TARGET is a
5107 reference to the containing structure. */
5110 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
, unsignedp
, type
,
5113 HOST_WIDE_INT bitsize
;
5114 HOST_WIDE_INT bitpos
;
5115 enum machine_mode mode
;
5117 enum machine_mode value_mode
;
5122 HOST_WIDE_INT width_mask
= 0;
5124 if (TREE_CODE (exp
) == ERROR_MARK
)
5127 /* If we have nothing to store, do nothing unless the expression has
5130 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5134 /* If we are storing into an unaligned field of an aligned union that is
5135 in a register, we may have the mode of TARGET being an integer mode but
5136 MODE == BLKmode. In that case, get an aligned object whose size and
5137 alignment are the same as TARGET and store TARGET into it (we can avoid
5138 the store if the field being stored is the entire width of TARGET). Then
5139 call ourselves recursively to store the field into a BLKmode version of
5140 that object. Finally, load from the object into TARGET. This is not
5141 very efficient in general, but should only be slightly more expensive
5142 than the otherwise-required unaligned accesses. Perhaps this can be
5143 cleaned up later. */
5146 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5150 (build_qualified_type (type
, TYPE_QUALS (type
) | TYPE_QUAL_CONST
),
5152 rtx blk_object
= copy_rtx (object
);
5154 PUT_MODE (blk_object
, BLKmode
);
5156 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5157 emit_move_insn (object
, target
);
5159 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5162 emit_move_insn (target
, object
);
5164 /* We want to return the BLKmode version of the data. */
5168 if (GET_CODE (target
) == CONCAT
)
5170 /* We're storing into a struct containing a single __complex. */
5174 return store_expr (exp
, target
, 0);
5177 /* If the structure is in a register or if the component
5178 is a bit field, we cannot use addressing to access it.
5179 Use bit-field techniques or SUBREG to store in it. */
5181 if (mode
== VOIDmode
5182 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5183 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5184 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5185 || GET_CODE (target
) == REG
5186 || GET_CODE (target
) == SUBREG
5187 /* If the field isn't aligned enough to store as an ordinary memref,
5188 store it as a bit field. */
5189 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
5190 && (MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
)
5191 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
5192 /* If the RHS and field are a constant size and the size of the
5193 RHS isn't the same size as the bitfield, we must use bitfield
5196 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5197 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5199 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5201 /* If BITSIZE is narrower than the size of the type of EXP
5202 we will be narrowing TEMP. Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is a
	 big-endian machine, we want the upper BITSIZE bits.  */
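      /* Worked example, assuming a 32-bit TEMP (width not from the
	 source): with bitsize == 8 and a RECORD_TYPE on a big-endian
	 target, the right shift below is 32 - 8 = 24 bits, moving the
	 upper 8 bits down so they become the low-order bits that get
	 stored.  */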
5205 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5206 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
5207 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5208 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5209 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5213 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5215 if (mode
!= VOIDmode
&& mode
!= BLKmode
5216 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5217 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5219 /* If the modes of TARGET and TEMP are both BLKmode, both
5220 must be in memory and BITPOS must be aligned on a byte
5221 boundary. If so, we simply do a block copy. */
5222 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5224 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5225 || bitpos
% BITS_PER_UNIT
!= 0)
5228 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5229 emit_block_move (target
, temp
,
5230 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5233 return value_mode
== VOIDmode
? const0_rtx
: target
;
5236 /* Store the value in the bitfield. */
5237 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5238 int_size_in_bytes (type
));
5240 if (value_mode
!= VOIDmode
)
5242 /* The caller wants an rtx for the value.
5243 If possible, avoid refetching from the bitfield itself. */
5245 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5248 enum machine_mode tmode
;
5251 return expand_and (temp
,
5255 GET_MODE (temp
) == VOIDmode
5257 : GET_MODE (temp
))), NULL_RTX
);
5259 tmode
= GET_MODE (temp
);
5260 if (tmode
== VOIDmode
)
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, VOIDmode,
				    int_size_in_bytes (type));
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
      /* Now build a reference to just the desired component.  */

      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
5305 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5306 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5307 codes and find the ultimate containing object, which we return.
5309 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5310 bit position, and *PUNSIGNEDP to the signedness of the field.
5311 If the position of the field is variable, we store a tree
5312 giving the variable offset (in units) in *POFFSET.
5313 This offset is in addition to the bit position.
5314 If the position is not variable, we store 0 in *POFFSET.
5316 If any of the extraction expressions is volatile,
5317 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5319 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5320 is a mode that can be used to access the field. In that case, *PBITSIZE
5323 If the field describes a variable-sized object, *PMODE is set to
5324 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5325 this case, but the address of the object can be found. */
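/* A minimal usage sketch, for illustration only; the local variables and
   the REF tree are hypothetical, but the call matches the parameter list
   of the function defined below.  INNER ends up as the ultimate containing
   object, with BITPOS/BITSIZE locating the reference and OFFSET holding
   any variable part of the position, in units.  */
#if 0
{
  tree ref = 0;			/* hypothetically, some COMPONENT_REF */
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;
  tree inner = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				    &mode1, &unsignedp, &volatilep);
}
#endif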
5328 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5329 punsignedp
, pvolatilep
)
5331 HOST_WIDE_INT
*pbitsize
;
5332 HOST_WIDE_INT
*pbitpos
;
5334 enum machine_mode
*pmode
;
5339 enum machine_mode mode
= VOIDmode
;
5340 tree offset
= size_zero_node
;
5341 tree bit_offset
= bitsize_zero_node
;
5342 tree placeholder_ptr
= 0;
5345 /* First get the mode, signedness, and size. We do this from just the
5346 outermost expression. */
5347 if (TREE_CODE (exp
) == COMPONENT_REF
)
5349 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5350 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5351 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5353 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5355 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5357 size_tree
= TREE_OPERAND (exp
, 1);
5358 *punsignedp
= TREE_UNSIGNED (exp
);
5362 mode
= TYPE_MODE (TREE_TYPE (exp
));
5363 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5365 if (mode
== BLKmode
)
5366 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5368 *pbitsize
= GET_MODE_BITSIZE (mode
);
5373 if (! host_integerp (size_tree
, 1))
5374 mode
= BLKmode
, *pbitsize
= -1;
5376 *pbitsize
= tree_low_cst (size_tree
, 1);
5379 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5380 and find the ultimate containing object. */
5383 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5384 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5385 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5387 tree field
= TREE_OPERAND (exp
, 1);
5388 tree this_offset
= DECL_FIELD_OFFSET (field
);
5390 /* If this field hasn't been filled in yet, don't go
5391 past it. This should only happen when folding expressions
5392 made during type construction. */
5393 if (this_offset
== 0)
5395 else if (! TREE_CONSTANT (this_offset
)
5396 && contains_placeholder_p (this_offset
))
5397 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5399 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5400 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5401 DECL_FIELD_BIT_OFFSET (field
));
5403 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5406 else if (TREE_CODE (exp
) == ARRAY_REF
5407 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5409 tree index
= TREE_OPERAND (exp
, 1);
5410 tree array
= TREE_OPERAND (exp
, 0);
5411 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5412 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5413 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5415 /* We assume all arrays have sizes that are a multiple of a byte.
5416 First subtract the lower bound, if any, in the type of the
5417 index, then convert to sizetype and multiply by the size of the
5419 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5420 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5423 /* If the index has a self-referential type, pass it to a
5424 WITH_RECORD_EXPR; if the component size is, pass our
5425 component to one. */
5426 if (! TREE_CONSTANT (index
)
5427 && contains_placeholder_p (index
))
5428 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5429 if (! TREE_CONSTANT (unit_size
)
5430 && contains_placeholder_p (unit_size
))
5431 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5433 offset
= size_binop (PLUS_EXPR
, offset
,
5434 size_binop (MULT_EXPR
,
5435 convert (sizetype
, index
),
5439 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5441 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5443 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5444 We might have been called from tree optimization where we
5445 haven't set up an object yet. */
5453 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5454 && TREE_CODE (exp
) != VIEW_CONVERT_EXPR
5455 && ! ((TREE_CODE (exp
) == NOP_EXPR
5456 || TREE_CODE (exp
) == CONVERT_EXPR
)
5457 && (TYPE_MODE (TREE_TYPE (exp
))
5458 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5461 /* If any reference in the chain is volatile, the effect is volatile. */
5462 if (TREE_THIS_VOLATILE (exp
))
5465 exp
= TREE_OPERAND (exp
, 0);
5468 /* If OFFSET is constant, see if we can return the whole thing as a
5469 constant bit position. Otherwise, split it up. */
5470 if (host_integerp (offset
, 0)
5471 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5473 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5474 && host_integerp (tem
, 0))
5475 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5477 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5483 /* Return 1 if T is an expression that get_inner_reference handles. */
5486 handled_component_p (t
)
5489 switch (TREE_CODE (t
))
5494 case ARRAY_RANGE_REF
:
5495 case NON_LVALUE_EXPR
:
5496 case VIEW_CONVERT_EXPR
:
5501 return (TYPE_MODE (TREE_TYPE (t
))
5502 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
5519 return MEMORY_USE_RO
;
5521 case EXPAND_MEMORY_USE_WO
:
5522 return MEMORY_USE_WO
;
5524 case EXPAND_MEMORY_USE_RW
:
5525 return MEMORY_USE_RW
;
5527 case EXPAND_MEMORY_USE_DONT
:
5528 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5529 MEMORY_USE_DONT, because they are modifiers to a call of
5530 expand_expr in the ADDR_EXPR case of expand_expr. */
5531 case EXPAND_CONST_ADDRESS
:
5532 case EXPAND_INITIALIZER
:
5533 return MEMORY_USE_DONT
;
5534 case EXPAND_MEMORY_USE_BAD
:
5540 /* Given an rtx VALUE that may contain additions and multiplications, return
5541 an equivalent value that just refers to a register, memory, or constant.
5542 This is done by generating instructions to perform the arithmetic and
5543 returning a pseudo-register containing the value.
5545 The returned value may be a REG, SUBREG, MEM or constant. */
5548 force_operand (value
, target
)
5552 /* Use a temporary to force order of execution of calls to
5556 /* Use subtarget as the target for operand 0 of a binary operation. */
5557 rtx subtarget
= get_subtarget (target
);
5559 /* Check for a PIC address load. */
5561 && (GET_CODE (value
) == PLUS
|| GET_CODE (value
) == MINUS
)
5562 && XEXP (value
, 0) == pic_offset_table_rtx
5563 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5564 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5565 || GET_CODE (XEXP (value
, 1)) == CONST
))
5568 subtarget
= gen_reg_rtx (GET_MODE (value
));
5569 emit_move_insn (subtarget
, value
);
  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
5579 op2
= XEXP (value
, 1);
5580 if (!CONSTANT_P (op2
)
5581 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5583 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5584 return expand_mult (GET_MODE (value
), tmp
,
5585 force_operand (op2
, NULL_RTX
),
5591 op2
= XEXP (value
, 1);
5592 if (!CONSTANT_P (op2
)
5593 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5595 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
5597 binoptab
= add_optab
;
5598 op2
= negate_rtx (GET_MODE (value
), op2
);
5601 /* Check for an addition with OP2 a constant integer and our first
5602 operand a PLUS of a virtual register and something else. In that
5603 case, we want to emit the sum of the virtual register and the
5604 constant first and then add the other value. This allows virtual
5605 register instantiation to simply modify the constant rather than
5606 creating another one around this addition. */
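  /* For illustration, with the rtl spelled informally (this exact value is
     not generated here): given
     VALUE = (plus (plus (reg virtual-stack-vars) (reg pseudo)) (const_int 4)),
     the code below first emits the sum of the virtual register and 4, so
     that virtual register instantiation can fold the 4 into the register's
     replacement offset, and only then adds the remaining pseudo.  */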
  if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
      && GET_CODE (XEXP (value, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
      && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
      && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
    {
      rtx temp = expand_binop (GET_MODE (value), binoptab,
			       XEXP (XEXP (value, 0), 0), op2,
			       subtarget, 0, OPTAB_LIB_WIDEN);
      return expand_binop (GET_MODE (value), binoptab, temp,
			   force_operand (XEXP (XEXP (value, 0), 1), 0),
			   target, 0, OPTAB_LIB_WIDEN);
    }
  tmp = force_operand (XEXP (value, 0), subtarget);
  return expand_binop (GET_MODE (value), binoptab, tmp,
		       force_operand (op2, NULL_RTX),
		       target, 0, OPTAB_LIB_WIDEN);
  /* We give UNSIGNEDP = 0 to expand_binop
     because the only operations we are expanding here are signed ones.  */
5629 #ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
5632 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5633 && (GET_MODE_SIZE (GET_MODE (value
))
5634 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5636 = simplify_gen_subreg (GET_MODE (value
),
5637 force_reg (GET_MODE (SUBREG_REG (value
)),
5638 force_operand (SUBREG_REG (value
),
5640 GET_MODE (SUBREG_REG (value
)),
5641 SUBREG_BYTE (value
));
5647 /* Subroutine of expand_expr: return nonzero iff there is no way that
5648 EXP can reference X, which is being modified. TOP_P is nonzero if this
5649 call is going to be used to determine whether we need a temporary
5650 for EXP, as opposed to a recursive call to this function.
5652 It is always safe for this routine to return zero since it merely
5653 searches for optimization opportunities. */
5656 safe_from_p (x
, exp
, top_p
)
5663 static tree save_expr_list
;
5666 /* If EXP has varying size, we MUST use a target since we currently
5667 have no way of allocating temporaries of variable size
5668 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5669 So we assume here that something at a higher level has prevented a
5670 clash. This is somewhat bogus, but the best we can do. Only
5671 do this when X is BLKmode and when we are at the top level. */
5672 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5673 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5674 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5675 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5676 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5678 && GET_MODE (x
) == BLKmode
)
5679 /* If X is in the outgoing argument area, it is always safe. */
5680 || (GET_CODE (x
) == MEM
5681 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5682 || (GET_CODE (XEXP (x
, 0)) == PLUS
5683 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5686 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5687 find the underlying pseudo. */
5688 if (GET_CODE (x
) == SUBREG
)
5691 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5695 /* A SAVE_EXPR might appear many times in the expression passed to the
5696 top-level safe_from_p call, and if it has a complex subexpression,
5697 examining it multiple times could result in a combinatorial explosion.
5698 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5699 with optimization took about 28 minutes to compile -- even though it was
5700 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5701 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5702 we have processed. Note that the only test of top_p was above. */
5711 rtn
= safe_from_p (x
, exp
, 0);
5713 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5714 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5719 /* Now look at our tree code and possibly recurse. */
5720 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5723 exp_rtl
= DECL_RTL_SET_P (exp
) ? DECL_RTL (exp
) : NULL_RTX
;
5730 if (TREE_CODE (exp
) == TREE_LIST
)
5731 return ((TREE_VALUE (exp
) == 0
5732 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5733 && (TREE_CHAIN (exp
) == 0
5734 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5735 else if (TREE_CODE (exp
) == ERROR_MARK
)
5736 return 1; /* An already-visited SAVE_EXPR? */
5741 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5745 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5746 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5750 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5751 the expression. If it is set, we conflict iff we are that rtx or
5752 both are in memory. Otherwise, we check all operands of the
5753 expression recursively. */
5755 switch (TREE_CODE (exp
))
5758 /* If the operand is static or we are static, we can't conflict.
5759 Likewise if we don't conflict with the operand at all. */
5760 if (staticp (TREE_OPERAND (exp
, 0))
5761 || TREE_STATIC (exp
)
5762 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
      /* Otherwise, the only way this can conflict is if we are taking
	 the address of a DECL whose address is part of X, which is
	 very rare.  */
5768 exp
= TREE_OPERAND (exp
, 0);
5771 if (!DECL_RTL_SET_P (exp
)
5772 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5775 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5780 if (GET_CODE (x
) == MEM
5781 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5782 get_alias_set (exp
)))
5787 /* Assume that the call will clobber all hard registers and
5789 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5790 || GET_CODE (x
) == MEM
)
5795 /* If a sequence exists, we would have to scan every instruction
5796 in the sequence to see if it was safe. This is probably not
5798 if (RTL_EXPR_SEQUENCE (exp
))
5801 exp_rtl
= RTL_EXPR_RTL (exp
);
5804 case WITH_CLEANUP_EXPR
:
5805 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
5808 case CLEANUP_POINT_EXPR
:
5809 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5812 exp_rtl
= SAVE_EXPR_RTL (exp
);
5816 /* If we've already scanned this, don't do it again. Otherwise,
5817 show we've scanned it and record for clearing the flag if we're
5819 if (TREE_PRIVATE (exp
))
5822 TREE_PRIVATE (exp
) = 1;
5823 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5825 TREE_PRIVATE (exp
) = 0;
5829 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5833 /* The only operand we look at is operand 1. The rest aren't
5834 part of the expression. */
5835 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5837 case METHOD_CALL_EXPR
:
5838 /* This takes an rtx argument, but shouldn't appear here. */
5845 /* If we have an rtx, we do not need to scan our operands. */
5849 nops
= first_rtl_op (TREE_CODE (exp
));
5850 for (i
= 0; i
< nops
; i
++)
5851 if (TREE_OPERAND (exp
, i
) != 0
5852 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5855 /* If this is a language-specific tree code, it may require
5856 special handling. */
5857 if ((unsigned int) TREE_CODE (exp
)
5858 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5860 && !(*lang_safe_from_p
) (x
, exp
))
5864 /* If we have an rtl, find any enclosed object. Then see if we conflict
5868 if (GET_CODE (exp_rtl
) == SUBREG
)
5870 exp_rtl
= SUBREG_REG (exp_rtl
);
5871 if (GET_CODE (exp_rtl
) == REG
5872 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5876 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5877 are memory and they conflict. */
5878 return ! (rtx_equal_p (x
, exp_rtl
)
5879 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5880 && true_dependence (exp_rtl
, GET_MODE (x
), x
,
5881 rtx_addr_varies_p
)));
5884 /* If we reach here, it is safe. */
5888 /* Subroutine of expand_expr: return rtx if EXP is a
5889 variable or parameter; else return 0. */
5896 switch (TREE_CODE (exp
))
5900 return DECL_RTL (exp
);
5906 #ifdef MAX_INTEGER_COMPUTATION_MODE
5909 check_max_integer_computation_mode (exp
)
5912 enum tree_code code
;
5913 enum machine_mode mode
;
5915 /* Strip any NOPs that don't change the mode. */
5917 code
= TREE_CODE (exp
);
5919 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5920 if (code
== NOP_EXPR
5921 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5924 /* First check the type of the overall operation. We need only look at
5925 unary, binary and relational operations. */
5926 if (TREE_CODE_CLASS (code
) == '1'
5927 || TREE_CODE_CLASS (code
) == '2'
5928 || TREE_CODE_CLASS (code
) == '<')
5930 mode
= TYPE_MODE (TREE_TYPE (exp
));
5931 if (GET_MODE_CLASS (mode
) == MODE_INT
5932 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5933 internal_error ("unsupported wide integer operation");
5936 /* Check operand of a unary op. */
5937 if (TREE_CODE_CLASS (code
) == '1')
5939 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5940 if (GET_MODE_CLASS (mode
) == MODE_INT
5941 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5942 internal_error ("unsupported wide integer operation");
5945 /* Check operands of a binary/comparison op. */
5946 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
5948 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5949 if (GET_MODE_CLASS (mode
) == MODE_INT
5950 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5951 internal_error ("unsupported wide integer operation");
5953 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
5954 if (GET_MODE_CLASS (mode
) == MODE_INT
5955 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5956 internal_error ("unsupported wide integer operation");
5961 /* Return the highest power of two that EXP is known to be a multiple of.
5962 This is used in updating alignment of MEMs in array references. */
5964 static HOST_WIDE_INT
5965 highest_pow2_factor (exp
)
5968 HOST_WIDE_INT c0
, c1
;
5970 switch (TREE_CODE (exp
))
      /* If the integer is expressible in a HOST_WIDE_INT, we can find the
	 lowest bit that's a one.  If the result is zero, pessimize by
	 returning 1.  This is overly-conservative, but such things should not
	 happen in the offset expressions that we are called with.  */
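      /* For illustration only: C0 & -C0 below isolates the lowest set bit,
	 so a constant offset of 24 (binary 11000) yields 24 & -24 = 8,
	 i.e. the expression is known to be a multiple of 8.  */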
      if (host_integerp (exp, 0))
	{
	  c0 = tree_low_cst (exp, 0);
	  c0 = c0 < 0 ? - c0 : c0;
	  return c0 != 0 ? c0 & -c0 : 1;
	}
    case PLUS_EXPR:  case MINUS_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);
5991 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
5992 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MAX (1, c0 / c1);
6001 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6002 case COMPOUND_EXPR
: case SAVE_EXPR
: case WITH_RECORD_EXPR
:
6003 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6006 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6007 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6008 return MIN (c0
, c1
);
6017 /* Return an object on the placeholder list that matches EXP, a
6018 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6019 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6020 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6021 is a location which initially points to a starting location in the
6022 placeholder list (zero means start of the list) and where a pointer into
6023 the placeholder list at which the object is found is placed. */
6026 find_placeholder (exp
, plist
)
6030 tree type
= TREE_TYPE (exp
);
6031 tree placeholder_expr
;
6033 for (placeholder_expr
6034 = plist
&& *plist
? TREE_CHAIN (*plist
) : placeholder_list
;
6035 placeholder_expr
!= 0;
6036 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
6038 tree need_type
= TYPE_MAIN_VARIANT (type
);
6041 /* Find the outermost reference that is of the type we want. If none,
6042 see if any object has a type that is a pointer to the type we
6044 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6045 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
6046 || TREE_CODE (elt
) == COND_EXPR
)
6047 ? TREE_OPERAND (elt
, 1)
6048 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6049 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6050 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6051 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6052 ? TREE_OPERAND (elt
, 0) : 0))
6053 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
6056 *plist
= placeholder_expr
;
6060 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6062 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6063 || TREE_CODE (elt
) == COND_EXPR
)
6064 ? TREE_OPERAND (elt
, 1)
6065 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6066 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6067 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6068 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6069 ? TREE_OPERAND (elt
, 0) : 0))
6070 if (POINTER_TYPE_P (TREE_TYPE (elt
))
6071 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
6075 *plist
= placeholder_expr
;
6076 return build1 (INDIRECT_REF
, need_type
, elt
);
6083 /* expand_expr: generate code for computing expression EXP.
6084 An rtx for the computed value is returned. The value is never null.
6085 In the case of a void EXP, const0_rtx is returned.
6087 The value may be stored in TARGET if TARGET is nonzero.
6088 TARGET is just a suggestion; callers must assume that
6089 the rtx returned may not be the same as TARGET.
6091 If TARGET is CONST0_RTX, it means that the value will be ignored.
6093 If TMODE is not VOIDmode, it suggests generating the
6094 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6096 TMODE is just a suggestion; callers must assume that
6097 the rtx returned may not have mode TMODE.
6099 Note that TARGET may have neither TMODE nor MODE. In that case, it
6100 probably will not be used.
6102 If MODIFIER is EXPAND_SUM then when EXP is an addition
6103 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6104 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6105 products as above, or REG or MEM, or constant.
6106 Ordinarily in such cases we would output mul or add instructions
6107 and then return a pseudo reg containing the sum.
6109 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6110 it also marks a label as absolutely required (it can't be dead).
6111 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6112 This is used for outputting expressions used in initializers.
6114 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6115 with a constant address even if that address is not normally legitimate.
6116 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
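/* A typical call, shown for illustration only (this exact call is not part
   of the surrounding comment, but matches uses elsewhere in this file):

     op0 = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   which lets expand_expr pick both the location and the mode; passing a
   nonzero TARGET or a specific TMODE merely suggests alternatives.  */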
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  rtx op0, op1, temp;
  int ignore;
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      return op0 ? op0 : const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                 || code == CONVERT_EXPR || code == REFERENCE_EXPR
                 || code == COND_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
          || code == INDIRECT_REF || code == BUFFER_REF)
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                       ro_modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
                       ro_modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, ro_modifier);
      else if (code == BIT_FIELD_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                       ro_modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
                       ro_modifier);
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
                       ro_modifier);
          return const0_rtx;
        }
    }
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            p->expr->x_forced_labels
              = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
                                   p->expr->x_forced_labels);
          }
        else if (modifier == EXPAND_INITIALIZER)
          forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                             label_rtx (exp),
                                             forced_labels);

        temp = gen_rtx_MEM (FUNCTION_MODE,
                            gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          rtx value = DECL_RTL_IF_SET (exp);

          layout_decl (exp, 0);

          /* If the RTL was already set, update its mode and memory
             attributes.  */
          if (value != 0)
            {
              PUT_MODE (value, DECL_MODE (exp));
              SET_DECL_RTL (exp, 0);
              set_mem_attributes (value, exp, 1);
              SET_DECL_RTL (exp, value);
            }
        }

      /* Although static-storage variables start off initialized, according to
         ANSI C, a memcpy could overwrite them with uninitialized values.  So
         we check them too.  This also lets us check for read-only variables
         accessed via a non-const declaration, in case it won't be detected
         any other way (e.g., in an embedded system or OS kernel without
         memory protection).

         Aggregates are not checked here; they're handled elsewhere.  */
      if (cfun && current_function_check_memory_usage
          && GET_CODE (DECL_RTL (exp)) == MEM
          && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
        {
          enum memory_use_mode memory_usage;
          memory_usage = get_memory_usage_from_modifier (modifier);

          in_check_memory_usage = 1;
          if (memory_usage != MEMORY_USE_DONT)
            emit_library_call (chkr_check_addr_libfunc,
                               LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                               XEXP (DECL_RTL (exp), 0), Pmode,
                               GEN_INT (int_size_in_bytes (type)),
                               TYPE_MODE (sizetype),
                               GEN_INT (memory_usage),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }

      /* ... fall through ...  */

      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
6379 /* Handle variables inherited from containing functions. */
6380 context
= decl_function_context (exp
);
6382 /* We treat inline_function_decl as an alias for the current function
6383 because that is the inline function whose vars, types, etc.
6384 are being merged into the current function.
6385 See expand_inline_function. */
6387 if (context
!= 0 && context
!= current_function_decl
6388 && context
!= inline_function_decl
6389 /* If var is static, we don't need a static chain to access it. */
6390 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6391 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6395 /* Mark as non-local and addressable. */
6396 DECL_NONLOCAL (exp
) = 1;
6397 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6399 mark_addressable (exp
);
6400 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6402 addr
= XEXP (DECL_RTL (exp
), 0);
6403 if (GET_CODE (addr
) == MEM
)
6405 = replace_equiv_address (addr
,
6406 fix_lexical_addr (XEXP (addr
, 0), exp
));
6408 addr
= fix_lexical_addr (addr
, exp
);
6410 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6413 /* This is the case of an array whose size is to be determined
6414 from its initializer, while the initializer is still being parsed.
6417 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6418 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6419 temp
= validize_mem (DECL_RTL (exp
));
6421 /* If DECL_RTL is memory, we are in the normal case and either
6422 the address is not valid or it is not a register and -fforce-addr
6423 is specified, get the address into a register. */
6425 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6426 && modifier
!= EXPAND_CONST_ADDRESS
6427 && modifier
!= EXPAND_SUM
6428 && modifier
!= EXPAND_INITIALIZER
6429 && (! memory_address_p (DECL_MODE (exp
),
6430 XEXP (DECL_RTL (exp
), 0))
6432 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6433 temp
= replace_equiv_address (DECL_RTL (exp
),
6434 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6436 /* If we got something, return it. But first, set the alignment
6437 if the address is a register. */
6440 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6441 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6446 /* If the mode of DECL_RTL does not match that of the decl, it
6447 must be a promoted value. We return a SUBREG of the wanted mode,
6448 but mark it so that we know that it was already extended. */
6450 if (GET_CODE (DECL_RTL (exp
)) == REG
6451 && GET_MODE (DECL_RTL (exp
)) != mode
)
6453 /* Get the signedness used for this variable. Ensure we get the
6454 same mode we got when the variable was declared. */
6455 if (GET_MODE (DECL_RTL (exp
))
6456 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
6459 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6460 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6461 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6465 return DECL_RTL (exp
);
6468 return immed_double_const (TREE_INT_CST_LOW (exp
),
6469 TREE_INT_CST_HIGH (exp
), mode
);
6472 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
,
6473 EXPAND_MEMORY_USE_BAD
);
6476 /* If optimized, generate immediate CONST_DOUBLE
6477 which will be turned into memory by reload if necessary.
6479 We used to force a register so that loop.c could see it. But
6480 this does not allow gen_* patterns to perform optimizations with
6481 the constants. It also produces two insns in cases like "x = 1.0;".
6482 On most machines, floating-point constants are not permitted in
6483 many insns, so we'd end up copying it to a register in any case.
6485 Now, we do the copying in expand_binop, if appropriate. */
6486 return immed_real_const (exp
);
6490 if (! TREE_CST_RTL (exp
))
6491 output_constant_def (exp
, 1);
6493 /* TREE_CST_RTL probably contains a constant address.
6494 On RISC machines where a constant address isn't valid,
6495 make some insns to get that address into a register. */
6496 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
6497 && modifier
!= EXPAND_CONST_ADDRESS
6498 && modifier
!= EXPAND_INITIALIZER
6499 && modifier
!= EXPAND_SUM
6500 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
6502 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
6503 return replace_equiv_address (TREE_CST_RTL (exp
),
6504 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
6505 return TREE_CST_RTL (exp
);
6507 case EXPR_WITH_FILE_LOCATION
:
6510 const char *saved_input_filename
= input_filename
;
6511 int saved_lineno
= lineno
;
6512 input_filename
= EXPR_WFL_FILENAME (exp
);
6513 lineno
= EXPR_WFL_LINENO (exp
);
6514 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6515 emit_line_note (input_filename
, lineno
);
6516 /* Possibly avoid switching back and forth here. */
6517 to_return
= expand_expr (EXPR_WFL_NODE (exp
), target
, tmode
, modifier
);
6518 input_filename
= saved_input_filename
;
6519 lineno
= saved_lineno
;
6524 context
= decl_function_context (exp
);
6526 /* If this SAVE_EXPR was at global context, assume we are an
6527 initialization function and move it into our context. */
6529 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6531 /* We treat inline_function_decl as an alias for the current function
6532 because that is the inline function whose vars, types, etc.
6533 are being merged into the current function.
6534 See expand_inline_function. */
6535 if (context
== current_function_decl
|| context
== inline_function_decl
)
6538 /* If this is non-local, handle it. */
6541 /* The following call just exists to abort if the context is
6542 not of a containing function. */
6543 find_function_data (context
);
6545 temp
= SAVE_EXPR_RTL (exp
);
6546 if (temp
&& GET_CODE (temp
) == REG
)
6548 put_var_into_stack (exp
);
6549 temp
= SAVE_EXPR_RTL (exp
);
6551 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6554 replace_equiv_address (temp
,
6555 fix_lexical_addr (XEXP (temp
, 0), exp
));
6557 if (SAVE_EXPR_RTL (exp
) == 0)
6559 if (mode
== VOIDmode
)
6562 temp
= assign_temp (build_qualified_type (type
,
6564 | TYPE_QUAL_CONST
)),
6567 SAVE_EXPR_RTL (exp
) = temp
;
6568 if (!optimize
&& GET_CODE (temp
) == REG
)
6569 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6572 /* If the mode of TEMP does not match that of the expression, it
6573 must be a promoted value. We pass store_expr a SUBREG of the
6574 wanted mode but mark it so that we know that it was already
6575 extended. Note that `unsignedp' was modified above in
6578 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6580 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6581 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6582 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6585 if (temp
== const0_rtx
)
6586 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6587 EXPAND_MEMORY_USE_BAD
);
6589 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
6591 TREE_USED (exp
) = 1;
6594 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6595 must be a promoted value. We return a SUBREG of the wanted mode,
6596 but mark it so that we know that it was already extended. */
6598 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6599 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6601 /* Compute the signedness and make the proper SUBREG. */
6602 promote_mode (type
, mode
, &unsignedp
, 0);
6603 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6604 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6605 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6609 return SAVE_EXPR_RTL (exp
);
6614 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6615 TREE_OPERAND (exp
, 0) = unsave_expr_now (TREE_OPERAND (exp
, 0));
6619 case PLACEHOLDER_EXPR
:
6621 tree old_list
= placeholder_list
;
6622 tree placeholder_expr
= 0;
6624 exp
= find_placeholder (exp
, &placeholder_expr
);
6628 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6629 temp
= expand_expr (exp
, original_target
, tmode
, ro_modifier
);
6630 placeholder_list
= old_list
;
6634 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6637 case WITH_RECORD_EXPR
:
6638 /* Put the object on the placeholder list, expand our first operand,
6639 and pop the list. */
6640 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6642 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
6643 tmode
, ro_modifier
);
6644 placeholder_list
= TREE_CHAIN (placeholder_list
);
6648 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6649 expand_goto (TREE_OPERAND (exp
, 0));
6651 expand_computed_goto (TREE_OPERAND (exp
, 0));
6655 expand_exit_loop_if_false (NULL
,
6656 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6659 case LABELED_BLOCK_EXPR
:
6660 if (LABELED_BLOCK_BODY (exp
))
6661 expand_expr_stmt (LABELED_BLOCK_BODY (exp
));
6662 /* Should perhaps use expand_label, but this is simpler and safer. */
6663 do_pending_stack_adjust ();
6664 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6667 case EXIT_BLOCK_EXPR
:
6668 if (EXIT_BLOCK_RETURN (exp
))
6669 sorry ("returned value in block_exit_expr");
6670 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6675 expand_start_loop (1);
6676 expand_expr_stmt (TREE_OPERAND (exp
, 0));
6684 tree vars
= TREE_OPERAND (exp
, 0);
6685 int vars_need_expansion
= 0;
6687 /* Need to open a binding contour here because
6688 if there are any cleanups they must be contained here. */
6689 expand_start_bindings (2);
6691 /* Mark the corresponding BLOCK for output in its proper place. */
6692 if (TREE_OPERAND (exp
, 2) != 0
6693 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6694 insert_block (TREE_OPERAND (exp
, 2));
6696 /* If VARS have not yet been expanded, expand them now. */
6699 if (!DECL_RTL_SET_P (vars
))
6701 vars_need_expansion
= 1;
6704 expand_decl_init (vars
);
6705 vars
= TREE_CHAIN (vars
);
6708 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, ro_modifier
);
6710 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6716 if (RTL_EXPR_SEQUENCE (exp
))
6718 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6720 emit_insns (RTL_EXPR_SEQUENCE (exp
));
6721 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6723 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6724 free_temps_for_rtl_expr (exp
);
6725 return RTL_EXPR_RTL (exp
);
6728 /* If we don't need the result, just ensure we evaluate any
6733 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6734 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
,
6735 EXPAND_MEMORY_USE_BAD
);
6739 /* All elts simple constants => refer to a constant in memory. But
6740 if this is a non-BLKmode mode, let it store a field at a time
6741 since that should make a CONST_INT or CONST_DOUBLE when we
6742 fold. Likewise, if we have a target we can use, it is best to
6743 store directly into the target unless the type is large enough
6744 that memcpy will be used. If we are making an initializer and
6745 all operands are constant, put it in memory as well. */
6746 else if ((TREE_STATIC (exp
)
6747 && ((mode
== BLKmode
6748 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6749 || TREE_ADDRESSABLE (exp
)
6750 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6751 && (! MOVE_BY_PIECES_P
6752 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6754 && ! mostly_zeros_p (exp
))))
6755 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
6757 rtx constructor
= output_constant_def (exp
, 1);
6759 if (modifier
!= EXPAND_CONST_ADDRESS
6760 && modifier
!= EXPAND_INITIALIZER
6761 && modifier
!= EXPAND_SUM
)
6762 constructor
= validize_mem (constructor
);
6768 /* Handle calls that pass values in multiple non-contiguous
6769 locations. The Irix 6 ABI has examples of this. */
6770 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6771 || GET_CODE (target
) == PARALLEL
)
6773 = assign_temp (build_qualified_type (type
,
6775 | (TREE_READONLY (exp
)
6776 * TYPE_QUAL_CONST
))),
6777 TREE_ADDRESSABLE (exp
), 1, 1);
6779 store_constructor (exp
, target
, 0,
6780 int_size_in_bytes (TREE_TYPE (exp
)));
6786 tree exp1
= TREE_OPERAND (exp
, 0);
6788 tree string
= string_constant (exp1
, &index
);
6790 /* Try to optimize reads from const strings. */
6792 && TREE_CODE (string
) == STRING_CST
6793 && TREE_CODE (index
) == INTEGER_CST
6794 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6795 && GET_MODE_CLASS (mode
) == MODE_INT
6796 && GET_MODE_SIZE (mode
) == 1
6797 && modifier
!= EXPAND_MEMORY_USE_WO
)
6799 GEN_INT (TREE_STRING_POINTER (string
)[TREE_INT_CST_LOW (index
)]);
6801 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6802 op0
= memory_address (mode
, op0
);
6804 if (cfun
&& current_function_check_memory_usage
6805 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
6807 enum memory_use_mode memory_usage
;
6808 memory_usage
= get_memory_usage_from_modifier (modifier
);
6810 if (memory_usage
!= MEMORY_USE_DONT
)
6812 in_check_memory_usage
= 1;
6813 emit_library_call (chkr_check_addr_libfunc
,
6814 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, op0
,
6815 Pmode
, GEN_INT (int_size_in_bytes (type
)),
6816 TYPE_MODE (sizetype
),
6817 GEN_INT (memory_usage
),
6818 TYPE_MODE (integer_type_node
));
6819 in_check_memory_usage
= 0;
6823 temp
= gen_rtx_MEM (mode
, op0
);
6824 set_mem_attributes (temp
, exp
, 0);
6826 /* If we are writing to this object and its type is a record with
6827 readonly fields, we must mark it as readonly so it will
6828 conflict with readonly references to those fields. */
6829 if (modifier
== EXPAND_MEMORY_USE_WO
&& readonly_fields_p (type
))
6830 RTX_UNCHANGING_P (temp
) = 1;
6836 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6840 tree array
= TREE_OPERAND (exp
, 0);
6841 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6842 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6843 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6846 /* Optimize the special-case of a zero lower bound.
6848 We convert the low_bound to sizetype to avoid some problems
6849 with constant folding. (E.g. suppose the lower bound is 1,
6850 and its mode is QI. Without the conversion, (ARRAY
6851 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6852 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6854 if (! integer_zerop (low_bound
))
6855 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6857 /* Fold an expression like: "foo"[2].
6858 This is not done in fold so it won't happen inside &.
6859 Don't fold if this is for wide characters since it's too
6860 difficult to do correctly and this is a very rare case. */
6862 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
6863 && TREE_CODE (array
) == STRING_CST
6864 && TREE_CODE (index
) == INTEGER_CST
6865 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6866 && GET_MODE_CLASS (mode
) == MODE_INT
6867 && GET_MODE_SIZE (mode
) == 1)
6869 GEN_INT (TREE_STRING_POINTER (array
)[TREE_INT_CST_LOW (index
)]);
6871 /* If this is a constant index into a constant array,
6872 just get the value from the array. Handle both the cases when
6873 we have an explicit constructor and when our operand is a variable
6874 that was declared const. */
6876 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
6877 && TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
6878 && TREE_CODE (index
) == INTEGER_CST
6879 && 0 > compare_tree_int (index
,
6880 list_length (CONSTRUCTOR_ELTS
6881 (TREE_OPERAND (exp
, 0)))))
6885 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6886 i
= TREE_INT_CST_LOW (index
);
6887 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6891 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6892 tmode
, ro_modifier
);
6895 else if (optimize
>= 1
6896 && modifier
!= EXPAND_CONST_ADDRESS
6897 && modifier
!= EXPAND_INITIALIZER
6898 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6899 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6900 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
6902 if (TREE_CODE (index
) == INTEGER_CST
)
6904 tree init
= DECL_INITIAL (array
);
6906 if (TREE_CODE (init
) == CONSTRUCTOR
)
6910 for (elem
= CONSTRUCTOR_ELTS (init
);
6912 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6913 elem
= TREE_CHAIN (elem
))
6916 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6917 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6918 tmode
, ro_modifier
);
6920 else if (TREE_CODE (init
) == STRING_CST
6921 && 0 > compare_tree_int (index
,
6922 TREE_STRING_LENGTH (init
)))
6924 tree type
= TREE_TYPE (TREE_TYPE (init
));
6925 enum machine_mode mode
= TYPE_MODE (type
);
6927 if (GET_MODE_CLASS (mode
) == MODE_INT
6928 && GET_MODE_SIZE (mode
) == 1)
6930 (TREE_STRING_POINTER
6931 (init
)[TREE_INT_CST_LOW (index
)]));
6940 case ARRAY_RANGE_REF
:
6941 /* If the operand is a CONSTRUCTOR, we can just extract the
6942 appropriate field if it is present. Don't do this if we have
6943 already written the data since we want to refer to that copy
6944 and varasm.c assumes that's what we'll do. */
6945 if (code
== COMPONENT_REF
6946 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
6947 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
6951 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6952 elt
= TREE_CHAIN (elt
))
6953 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6954 /* We can normally use the value of the field in the
6955 CONSTRUCTOR. However, if this is a bitfield in
6956 an integral mode that we can fit in a HOST_WIDE_INT,
6957 we must mask only the number of bits in the bitfield,
6958 since this is done implicitly by the constructor. If
6959 the bitfield does not meet either of those conditions,
6960 we can't do this optimization. */
6961 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6962 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6964 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6965 <= HOST_BITS_PER_WIDE_INT
))))
6967 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6968 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6970 HOST_WIDE_INT bitsize
6971 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6973 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6975 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6976 op0
= expand_and (op0
, op1
, target
);
6980 enum machine_mode imode
6981 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6983 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6986 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6988 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6998 enum machine_mode mode1
;
6999 HOST_WIDE_INT bitsize
, bitpos
;
7002 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7003 &mode1
, &unsignedp
, &volatilep
);
7006 /* If we got back the original object, something is wrong. Perhaps
7007 we are evaluating an expression too early. In any event, don't
7008 infinitely recurse. */
7012 /* If TEM's type is a union of variable size, pass TARGET to the inner
7013 computation, since it will need a temporary and TARGET is known
7014 to have to do. This occurs in unchecked conversion in Ada. */
7018 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7019 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7021 ? target
: NULL_RTX
),
7023 (modifier
== EXPAND_INITIALIZER
7024 || modifier
== EXPAND_CONST_ADDRESS
)
7025 ? modifier
: EXPAND_NORMAL
);
7027 /* If this is a constant, put it into a register if it is a
7028 legitimate constant and OFFSET is 0 and memory if it isn't. */
7029 if (CONSTANT_P (op0
))
7031 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7032 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7034 op0
= force_reg (mode
, op0
);
7036 op0
= validize_mem (force_const_mem (mode
, op0
));
7041 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
7043 /* If this object is in a register, put it into memory.
7044 This case can't occur in C, but can in Ada if we have
7045 unchecked conversion of an expression from a scalar type to
7046 an array or record type. */
7047 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7048 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
7050 /* If the operand is a SAVE_EXPR, we can deal with this by
7051 forcing the SAVE_EXPR into memory. */
7052 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7054 put_var_into_stack (TREE_OPERAND (exp
, 0));
7055 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7060 = build_qualified_type (TREE_TYPE (tem
),
7061 (TYPE_QUALS (TREE_TYPE (tem
))
7062 | TYPE_QUAL_CONST
));
7063 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7065 emit_move_insn (memloc
, op0
);
7070 if (GET_CODE (op0
) != MEM
)
7073 if (GET_MODE (offset_rtx
) != ptr_mode
)
7074 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7076 #ifdef POINTERS_EXTEND_UNSIGNED
7077 if (GET_MODE (offset_rtx
) != Pmode
)
7078 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
7081 /* A constant address in OP0 can have VOIDmode, we must not try
7082 to call force_reg for that case. Avoid that case. */
7083 if (GET_CODE (op0
) == MEM
7084 && GET_MODE (op0
) == BLKmode
7085 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7087 && (bitpos
% bitsize
) == 0
7088 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7089 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7091 rtx temp
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7093 if (GET_CODE (XEXP (temp
, 0)) == REG
)
7096 op0
= (replace_equiv_address
7098 force_reg (GET_MODE (XEXP (temp
, 0)),
7103 op0
= offset_address (op0
, offset_rtx
,
7104 highest_pow2_factor (offset
));
7107 /* Don't forget about volatility even if this is a bitfield. */
7108 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7110 if (op0
== orig_op0
)
7111 op0
= copy_rtx (op0
);
7113 MEM_VOLATILE_P (op0
) = 1;
7116 /* Check the access. */
7117 if (cfun
!= 0 && current_function_check_memory_usage
7118 && GET_CODE (op0
) == MEM
)
7120 enum memory_use_mode memory_usage
;
7121 memory_usage
= get_memory_usage_from_modifier (modifier
);
7123 if (memory_usage
!= MEMORY_USE_DONT
)
7128 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
7129 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
7131 /* Check the access right of the pointer. */
7132 in_check_memory_usage
= 1;
7133 if (size
> BITS_PER_UNIT
)
7134 emit_library_call (chkr_check_addr_libfunc
,
7135 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, to
,
7136 Pmode
, GEN_INT (size
/ BITS_PER_UNIT
),
7137 TYPE_MODE (sizetype
),
7138 GEN_INT (memory_usage
),
7139 TYPE_MODE (integer_type_node
));
7140 in_check_memory_usage
= 0;
7144 /* In cases where an aligned union has an unaligned object
7145 as a field, we might be extracting a BLKmode value from
7146 an integer-mode (e.g., SImode) object. Handle this case
7147 by doing the extract into an object as wide as the field
7148 (which we know to be the width of a basic mode), then
7149 storing into memory, and changing the mode to BLKmode. */
7150 if (mode1
== VOIDmode
7151 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7152 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7153 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7154 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7155 && modifier
!= EXPAND_CONST_ADDRESS
7156 && modifier
!= EXPAND_INITIALIZER
)
7157 /* If the field isn't aligned enough to fetch as a memref,
7158 fetch it as a bit field. */
7159 || (mode1
!= BLKmode
7160 && SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))
7161 && ((TYPE_ALIGN (TREE_TYPE (tem
))
7162 < GET_MODE_ALIGNMENT (mode
))
7163 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
7164 /* If the type and the field are a constant size and the
7165 size of the type isn't the same size as the bitfield,
7166 we must use bitfield operations. */
7168 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7170 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7173 enum machine_mode ext_mode
= mode
;
7175 if (ext_mode
== BLKmode
7176 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7177 && GET_CODE (target
) == MEM
7178 && bitpos
% BITS_PER_UNIT
== 0))
7179 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7181 if (ext_mode
== BLKmode
)
7183 /* In this case, BITPOS must start at a byte boundary and
7184 TARGET, if specified, must be a MEM. */
7185 if (GET_CODE (op0
) != MEM
7186 || (target
!= 0 && GET_CODE (target
) != MEM
)
7187 || bitpos
% BITS_PER_UNIT
!= 0)
7190 op0
= adjust_address (op0
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
7192 target
= assign_temp (type
, 0, 1, 1);
7194 emit_block_move (target
, op0
,
7195 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7201 op0
= validize_mem (op0
);
7203 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7204 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7206 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
7207 unsignedp
, target
, ext_mode
, ext_mode
,
7208 int_size_in_bytes (TREE_TYPE (tem
)));
7210 /* If the result is a record type and BITSIZE is narrower than
7211 the mode of OP0, an integral mode, and this is a big endian
7212 machine, we must put the field into the high-order bits. */
7213 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7214 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7215 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
7216 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7217 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7221 if (mode
== BLKmode
)
7223 rtx
new = assign_temp (build_qualified_type
7224 (type_for_mode (ext_mode
, 0),
7225 TYPE_QUAL_CONST
), 0, 1, 1);
7227 emit_move_insn (new, op0
);
7228 op0
= copy_rtx (new);
7229 PUT_MODE (op0
, BLKmode
);
7230 set_mem_attributes (op0
, exp
, 1);
7236 /* If the result is BLKmode, use that to access the object
7238 if (mode
== BLKmode
)
7241 /* Get a reference to just this component. */
7242 if (modifier
== EXPAND_CONST_ADDRESS
7243 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7244 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7246 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7248 if (op0
== orig_op0
)
7249 op0
= copy_rtx (op0
);
7251 set_mem_attributes (op0
, exp
, 0);
7252 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7253 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7255 MEM_VOLATILE_P (op0
) |= volatilep
;
7256 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7257 || modifier
== EXPAND_CONST_ADDRESS
7258 || modifier
== EXPAND_INITIALIZER
)
7260 else if (target
== 0)
7261 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7263 convert_move (target
, op0
, unsignedp
);
7269 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7271 /* Evaluate the interior expression. */
7272 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7275 /* Get or create an instruction off which to hang a note. */
7276 if (REG_P (subtarget
))
7279 insn
= get_last_insn ();
7282 if (! INSN_P (insn
))
7283 insn
= prev_nonnote_insn (insn
);
7287 target
= gen_reg_rtx (GET_MODE (subtarget
));
7288 insn
= emit_move_insn (target
, subtarget
);
7291 /* Collect the data for the note. */
7292 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7293 vtbl_ref
= plus_constant (vtbl_ref
,
7294 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7295 /* Discard the initial CONST that was added. */
7296 vtbl_ref
= XEXP (vtbl_ref
, 0);
7299 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7304 /* Intended for a reference to a buffer of a file-object in Pascal.
7305 But it's not certain that a special tree code will really be
7306 necessary for these. INDIRECT_REF might work for them. */
7312 /* Pascal set IN expression.
7315 rlo = set_low - (set_low%bits_per_word);
7316 the_word = set [ (index - rlo)/bits_per_word ];
7317 bit_index = index % bits_per_word;
7318 bitmask = 1 << bit_index;
7319 return !!(the_word & bitmask); */
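         Worked example (illustration only, not in the original sources):
         with bits_per_word == 8, set_low == 3 and index == 13 we get
         rlo = 3 - (3 % 8) = 0, so the_word = set[(13 - 0) / 8] = set[1],
         bit_index = 13 % 8 = 5 and bitmask = 1 << 5 = 32; the membership
         test is then simply (set[1] & 32) != 0.  */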
7321 tree set
= TREE_OPERAND (exp
, 0);
7322 tree index
= TREE_OPERAND (exp
, 1);
7323 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7324 tree set_type
= TREE_TYPE (set
);
7325 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7326 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7327 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7328 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7329 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7330 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7331 rtx setaddr
= XEXP (setval
, 0);
7332 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7334 rtx diff
, quo
, rem
, addr
, bit
, result
;
7336 /* If domain is empty, answer is no. Likewise if index is constant
7337 and out of bounds. */
7338 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7339 && TREE_CODE (set_low_bound
) == INTEGER_CST
7340 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7341 || (TREE_CODE (index
) == INTEGER_CST
7342 && TREE_CODE (set_low_bound
) == INTEGER_CST
7343 && tree_int_cst_lt (index
, set_low_bound
))
7344 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7345 && TREE_CODE (index
) == INTEGER_CST
7346 && tree_int_cst_lt (set_high_bound
, index
))))
7350 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7352 /* If we get here, we have to generate the code for both cases
7353 (in range and out of range). */
7355 op0
= gen_label_rtx ();
7356 op1
= gen_label_rtx ();
7358 if (! (GET_CODE (index_val
) == CONST_INT
7359 && GET_CODE (lo_r
) == CONST_INT
))
7360 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7361 GET_MODE (index_val
), iunsignedp
, op1
);
7363 if (! (GET_CODE (index_val
) == CONST_INT
7364 && GET_CODE (hi_r
) == CONST_INT
))
7365 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7366 GET_MODE (index_val
), iunsignedp
, op1
);
7368 /* Calculate the element number of bit zero in the first word
7370 if (GET_CODE (lo_r
) == CONST_INT
)
7371 rlow
= GEN_INT (INTVAL (lo_r
)
7372 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7374 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7375 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7376 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7378 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7379 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7381 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7382 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7383 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7384 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7386 addr
= memory_address (byte_mode
,
7387 expand_binop (index_mode
, add_optab
, diff
,
7388 setaddr
, NULL_RTX
, iunsignedp
,
7391 /* Extract the bit we want to examine. */
7392 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7393 gen_rtx_MEM (byte_mode
, addr
),
7394 make_tree (TREE_TYPE (index
), rem
),
7396 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7397 GET_MODE (target
) == byte_mode
? target
: 0,
7398 1, OPTAB_LIB_WIDEN
);
7400 if (result
!= target
)
7401 convert_move (target
, result
, 1);
7403 /* Output the code to handle the out-of-range case. */
7406 emit_move_insn (target
, const0_rtx
);
7411 case WITH_CLEANUP_EXPR
:
7412 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7414 WITH_CLEANUP_EXPR_RTL (exp
)
7415 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
7416 expand_decl_cleanup (NULL_TREE
, TREE_OPERAND (exp
, 1));
7418 /* That's it for this cleanup. */
7419 TREE_OPERAND (exp
, 1) = 0;
7421 return WITH_CLEANUP_EXPR_RTL (exp
);
7423 case CLEANUP_POINT_EXPR
:
7425 /* Start a new binding layer that will keep track of all cleanup
7426 actions to be performed. */
7427 expand_start_bindings (2);
7429 target_temp_slot_level
= temp_slot_level
;
7431 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
7432 /* If we're going to use this value, load it up now. */
7434 op0
= force_not_mem (op0
);
7435 preserve_temp_slots (op0
);
7436 expand_end_bindings (NULL_TREE
, 0, 0);
7441 /* Check for a built-in function. */
7442 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7443 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7445 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7447 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7448 == BUILT_IN_FRONTEND
)
7449 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
7451 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7454 return expand_call (exp
, target
, ignore
);
7456 case NON_LVALUE_EXPR
:
7459 case REFERENCE_EXPR
:
7460 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7463 if (TREE_CODE (type
) == UNION_TYPE
)
7465 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7467 /* If both input and output are BLKmode, this conversion isn't doing
7468 anything except possibly changing memory attribute. */
7469 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7471 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7474 result
= copy_rtx (result
);
7475 set_mem_attributes (result
, exp
, 0);
7480 target
= assign_temp (type
, 0, 1, 1);
7482 if (GET_CODE (target
) == MEM
)
7483 /* Store data into beginning of memory target. */
7484 store_expr (TREE_OPERAND (exp
, 0),
7485 adjust_address (target
, TYPE_MODE (valtype
), 0), 0);
7487 else if (GET_CODE (target
) == REG
)
7488 /* Store this field into a union of the proper type. */
7489 store_field (target
,
7490 MIN ((int_size_in_bytes (TREE_TYPE
7491 (TREE_OPERAND (exp
, 0)))
7493 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7494 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7495 VOIDmode
, 0, type
, 0);
7499 /* Return the entire union. */
7503 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7505 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7508 /* If the signedness of the conversion differs and OP0 is
7509 a promoted SUBREG, clear that indication since we now
7510 have to do the proper extension. */
7511 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7512 && GET_CODE (op0
) == SUBREG
)
7513 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7518 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
7519 if (GET_MODE (op0
) == mode
)
7522 /* If OP0 is a constant, just convert it into the proper mode. */
7523 if (CONSTANT_P (op0
))
7525 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7526 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7528 if (modifier
== EXPAND_INITIALIZER
)
7529 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7533 convert_to_mode (mode
, op0
,
7534 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7536 convert_move (target
, op0
,
7537 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7540 case VIEW_CONVERT_EXPR
:
7541 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
7543 /* If the input and output modes are both the same, we are done.
7544 Otherwise, if neither mode is BLKmode and both are within a word, we
7545 can use gen_lowpart. If neither is true, make sure the operand is
7546 in memory and convert the MEM to the new mode. */
7547 if (TYPE_MODE (type
) == GET_MODE (op0
))
7549 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7550 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7551 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7552 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7553 else if (GET_CODE (op0
) != MEM
)
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
7559 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7561 if (TREE_ADDRESSABLE (exp
))
7564 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7566 = assign_stack_temp_for_type
7567 (TYPE_MODE (inner_type
),
7568 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7570 emit_move_insn (target
, op0
);
7574 /* At this point, OP0 is in the correct mode. If the output type is such
7575 that the operand is known to be aligned, indicate that it is.
7576 Otherwise, we need only be concerned about alignment for non-BLKmode
7578 if (GET_CODE (op0
) == MEM
)
7580 op0
= copy_rtx (op0
);
7582 if (TYPE_ALIGN_OK (type
))
7583 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7584 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7585 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7587 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7588 HOST_WIDE_INT temp_size
= MAX (int_size_in_bytes (inner_type
),
7589 GET_MODE_SIZE (TYPE_MODE (type
)));
7590 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7591 temp_size
, 0, type
);
7592 rtx new_with_op0_mode
= copy_rtx (new);
7594 if (TREE_ADDRESSABLE (exp
))
7597 PUT_MODE (new_with_op0_mode
, GET_MODE (op0
));
7598 if (GET_MODE (op0
) == BLKmode
)
7599 emit_block_move (new_with_op0_mode
, op0
,
7600 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))));
7602 emit_move_insn (new_with_op0_mode
, op0
);
7607 PUT_MODE (op0
, TYPE_MODE (type
));
7613 /* We come here from MINUS_EXPR when the second operand is a
7616 this_optab
= ! unsignedp
&& flag_trapv
7617 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7618 ? addv_optab
: add_optab
;
7620 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7621 something else, make sure we add the register to the constant and
7622 then to the other thing. This case can occur during strength
7623 reduction and doing it this way will produce better code if the
7624 frame pointer or argument pointer is eliminated.
7626 fold-const.c will ensure that the constant is always in the inner
7627 PLUS_EXPR, so the only case we need to do anything about is if
7628 sp, ap, or fp is our second argument, in which case we must swap
7629 the innermost first argument and our second argument. */
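      /* For instance (illustration only): if EXP is (X + 4) + FP, the swap
         below turns it into (FP + 4) + X, so the register and the constant
         are combined first and can be simplified away when the frame
         pointer is eliminated.  */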
7631 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7632 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7633 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7634 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7635 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7636 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7638 tree t
= TREE_OPERAND (exp
, 1);
7640 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7641 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7644 /* If the result is to be ptr_mode and we are adding an integer to
7645 something, we might be forming a constant. So try to use
7646 plus_constant. If it produces a sum and we can't accept it,
7647 use force_operand. This allows P = &ARR[const] to generate
7648 efficient code on machines where a SYMBOL_REF is not a valid
7651 If this is an EXPAND_SUM call, always return the sum. */
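      /* Concretely (illustration only): for P = &arr[3] with 4-byte
         elements, the address ideally folds to something like
         (plus (symbol_ref "arr") (const_int 12)) via plus_constant, instead
         of a separate add instruction whose result is then stored in P.  */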
7652 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7653 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7655 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7656 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7657 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7661 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7663 /* Use immed_double_const to ensure that the constant is
7664 truncated according to the mode of OP1, then sign extended
7665 to a HOST_WIDE_INT. Using the constant directly can result
7666 in non-canonical RTL in a 64x32 cross compile. */
7668 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7670 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7671 op1
= plus_constant (op1
, INTVAL (constant_part
));
7672 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7673 op1
= force_operand (op1
, target
);
7677 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7678 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7679 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7683 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7685 if (! CONSTANT_P (op0
))
7687 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7688 VOIDmode
, modifier
);
7689 /* Don't go to both_summands if modifier
7690 says it's not right to return a PLUS. */
7691 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7695 /* Use immed_double_const to ensure that the constant is
7696 truncated according to the mode of OP1, then sign extended
7697 to a HOST_WIDE_INT. Using the constant directly can result
7698 in non-canonical RTL in a 64x32 cross compile. */
7700 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7702 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7703 op0
= plus_constant (op0
, INTVAL (constant_part
));
7704 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7705 op0
= force_operand (op0
, target
);
7710 /* No sense saving up arithmetic to be done
7711 if it's all in the wrong mode to form part of an address.
7712 And force_operand won't know whether to sign-extend or
7714 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7715 || mode
!= ptr_mode
)
7718 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7721 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, ro_modifier
);
7722 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, ro_modifier
);
7725 /* Make sure any term that's a sum with a constant comes last. */
7726 if (GET_CODE (op0
) == PLUS
7727 && CONSTANT_P (XEXP (op0
, 1)))
7733 /* If adding to a sum including a constant,
7734 associate it to put the constant outside. */
7735 if (GET_CODE (op1
) == PLUS
7736 && CONSTANT_P (XEXP (op1
, 1)))
7738 rtx constant_term
= const0_rtx
;
7740 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7743 /* Ensure that MULT comes first if there is one. */
7744 else if (GET_CODE (op0
) == MULT
)
7745 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7747 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7749 /* Let's also eliminate constants from op0 if possible. */
7750 op0
= eliminate_constant_term (op0
, &constant_term
);
7752 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7753 their sum should be a constant. Form it into OP1, since the
7754 result we want will then be OP0 + OP1. */
7756 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7761 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7764 /* Put a constant term last and put a multiplication first. */
7765 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7766 temp
= op1
, op1
= op0
, op0
= temp
;
7768 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7769 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
7772 /* For initializers, we are allowed to return a MINUS of two
7773 symbolic constants. Here we handle all cases when both operands
7775 /* Handle difference of two symbolic constants,
7776 for the sake of an initializer. */
7777 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7778 && really_constant_p (TREE_OPERAND (exp
, 0))
7779 && really_constant_p (TREE_OPERAND (exp
, 1)))
7781 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
7782 VOIDmode
, ro_modifier
);
7783 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7784 VOIDmode
, ro_modifier
);
7786 /* If the last operand is a CONST_INT, use plus_constant of
7787 the negated constant. Else make the MINUS. */
7788 if (GET_CODE (op1
) == CONST_INT
)
7789 return plus_constant (op0
, - INTVAL (op1
));
7791 return gen_rtx_MINUS (mode
, op0
, op1
);
7793 /* Convert A - const to A + (-const). */
7794 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7796 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
7797 TREE_OPERAND (exp
, 1)));
7799 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
7800 /* If we can't negate the constant in TYPE, leave it alone and
7801 expand_binop will negate it for us. We used to try to do it
7802 here in the signed version of TYPE, but that doesn't work
7803 on POINTER_TYPEs. */;
7806 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
7810 this_optab
= ! unsignedp
&& flag_trapv
7811 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7812 ? subv_optab
: sub_optab
;
7816 /* If first operand is constant, swap them.
7817 Thus the following special case checks need only
7818 check the second operand. */
7819 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7821 tree t1
= TREE_OPERAND (exp
, 0);
7822 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7823 TREE_OPERAND (exp
, 1) = t1
;
7826 /* Attempt to return something suitable for generating an
7827 indexed address, for machines that support that. */
7829 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7830 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7831 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
7833 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7836 /* Apply distributive law if OP0 is x+c. */
7837 if (GET_CODE (op0
) == PLUS
7838 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
7843 (mode
, XEXP (op0
, 0),
7844 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
7845 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
7846 * INTVAL (XEXP (op0
, 1))));
7848 if (GET_CODE (op0
) != REG
)
7849 op0
= force_operand (op0
, NULL_RTX
);
7850 if (GET_CODE (op0
) != REG
)
7851 op0
= copy_to_mode_reg (mode
, op0
);
7854 gen_rtx_MULT (mode
, op0
,
7855 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
7858 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7861 /* Check for multiplying things that have been extended
7862 from a narrower type. If this machine supports multiplying
7863 in that narrower type with a result in the desired type,
7864 do it that way, and avoid the explicit type-conversion. */
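      /* For example (illustration only): on a target with a widening
         multiply pattern such as mulhisi3, (int) a * (int) b with HImode
         operands a and b can be emitted as one HImode-by-HImode multiply
         producing an SImode product, rather than sign-extending both
         operands to SImode and doing a full SImode multiply.  */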
7865 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7866 && TREE_CODE (type
) == INTEGER_TYPE
7867 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7868 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7869 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7870 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7871 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7872 /* Don't use a widening multiply if a shift will do. */
7873 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7874 > HOST_BITS_PER_WIDE_INT
)
7875 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7877 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7878 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7880 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7881 /* If both operands are extended, they must either both
7882 be zero-extended or both be sign-extended. */
7883 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7885 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7887 enum machine_mode innermode
7888 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7889 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7890 ? smul_widen_optab
: umul_widen_optab
);
7891 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7892 ? umul_widen_optab
: smul_widen_optab
);
7893 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7895 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7897 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7898 NULL_RTX
, VOIDmode
, 0);
7899 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7900 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7903 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7904 NULL_RTX
, VOIDmode
, 0);
7907 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7908 && innermode
== word_mode
)
7911 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7912 NULL_RTX
, VOIDmode
, 0);
7913 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7914 op1
= convert_modes (innermode
, mode
,
7915 expand_expr (TREE_OPERAND (exp
, 1),
7916 NULL_RTX
, VOIDmode
, 0),
7919 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7920 NULL_RTX
, VOIDmode
, 0);
7921 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7922 unsignedp
, OPTAB_LIB_WIDEN
);
7923 htem
= expand_mult_highpart_adjust (innermode
,
7924 gen_highpart (innermode
, temp
),
7926 gen_highpart (innermode
, temp
),
7928 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7933 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7934 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7935 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7937 case TRUNC_DIV_EXPR
:
7938 case FLOOR_DIV_EXPR
:
7940 case ROUND_DIV_EXPR
:
7941 case EXACT_DIV_EXPR
:
7942 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7944 /* Possible optimization: compute the dividend with EXPAND_SUM
7945 then if the divisor is constant can optimize the case
7946 where some terms of the dividend have coeffs divisible by it. */
7947 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7948 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7949 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7952 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7953 expensive divide. If not, combine will rebuild the original
7955 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
7956 && !real_onep (TREE_OPERAND (exp
, 0)))
7957 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
7958 build (RDIV_EXPR
, type
,
7959 build_real (type
, dconst1
),
7960 TREE_OPERAND (exp
, 1))),
7961 target
, tmode
, unsignedp
);
7962 this_optab
= sdiv_optab
;
7965 case TRUNC_MOD_EXPR
:
7966 case FLOOR_MOD_EXPR
:
7968 case ROUND_MOD_EXPR
:
7969 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7971 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7972 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7973 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7975 case FIX_ROUND_EXPR
:
7976 case FIX_FLOOR_EXPR
:
7978 abort (); /* Not used for C. */
7980 case FIX_TRUNC_EXPR
:
7981 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7983 target
= gen_reg_rtx (mode
);
7984 expand_fix (target
, op0
, unsignedp
);
    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
                          ! unsignedp && flag_trapv
                          && (GET_MODE_CLASS (mode) == MODE_INT)
                          ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8028 target
= original_target
;
8029 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
8030 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8031 || GET_MODE (target
) != mode
8032 || (GET_CODE (target
) == REG
8033 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8034 target
= gen_reg_rtx (mode
);
8035 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8036 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8038 /* First try to do it with a special MIN or MAX instruction.
8039 If that does not win, use a conditional jump to select the proper
8041 this_optab
= (TREE_UNSIGNED (type
)
8042 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8043 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8045 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8050 /* At this point, a MEM target is no longer useful; we will get better
8053 if (GET_CODE (target
) == MEM
)
8054 target
= gen_reg_rtx (mode
);
8057 emit_move_insn (target
, op0
);
8059 op0
= gen_label_rtx ();
8061 /* If this mode is an integer too wide to compare properly,
8062 compare word by word. Rely on cse to optimize constant cases. */
8063 if (GET_MODE_CLASS (mode
) == MODE_INT
8064 && ! can_compare_p (GE
, mode
, ccp_jump
))
8066 if (code
== MAX_EXPR
)
8067 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8068 target
, op1
, NULL_RTX
, op0
);
8070 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8071 op1
, target
, NULL_RTX
, op0
);
8075 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
8076 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8077 unsignedp
, mode
, NULL_RTX
, NULL_RTX
,
8080 emit_move_insn (target
, op1
);
8085 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8086 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8092 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8093 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */
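      /* Concretely: for `a && b' where neither operand has side effects,
         TRUTH_AND_EXPR computes something like (a != 0) & (b != 0), reducing
         both operands to 0-or-1 values and bitwise anding them, whereas
         TRUTH_ANDIF_EXPR would branch around the evaluation of `b' whenever
         `a' turns out to be zero.  */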
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
8144 case UNORDERED_EXPR
:
8151 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
8155 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8156 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8158 && GET_CODE (original_target
) == REG
8159 && (GET_MODE (original_target
)
8160 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8162 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8165 if (temp
!= original_target
)
8166 temp
= copy_to_reg (temp
);
8168 op1
= gen_label_rtx ();
8169 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8170 GET_MODE (temp
), unsignedp
, op1
);
8171 emit_move_insn (temp
, const1_rtx
);
8176 /* If no set-flag instruction, must generate a conditional
8177 store into a temporary variable. Drop through
8178 and handle this like && and ||. */
8180 case TRUTH_ANDIF_EXPR
:
8181 case TRUTH_ORIF_EXPR
:
8183 && (target
== 0 || ! safe_from_p (target
, exp
, 1)
8184 /* Make sure we don't have a hard reg (such as function's return
8185 value) live across basic blocks, if not optimizing. */
8186 || (!optimize
&& GET_CODE (target
) == REG
8187 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8188 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8191 emit_clr_insn (target
);
8193 op1
= gen_label_rtx ();
8194 jumpifnot (exp
, op1
);
8197 emit_0_to_1_insn (target
);
8200 return ignore
? const0_rtx
: target
;
8202 case TRUTH_NOT_EXPR
:
8203 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8204 /* The parser is careful to generate TRUTH_NOT_EXPR
8205 only with operands that are always zero or one. */
8206 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8207 target
, 1, OPTAB_LIB_WIDEN
);
8213 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8215 return expand_expr (TREE_OPERAND (exp
, 1),
8216 (ignore
? const0_rtx
: target
),
8220 /* If we would have a "singleton" (see below) were it not for a
8221 conversion in each arm, bring that conversion back out. */
8222 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8223 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8224 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8225 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8227 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8228 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8230 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8231 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8232 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8233 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8234 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8235 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8236 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8237 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8238 return expand_expr (build1 (NOP_EXPR
, type
,
8239 build (COND_EXPR
, TREE_TYPE (iftrue
),
8240 TREE_OPERAND (exp
, 0),
8242 target
, tmode
, modifier
);
8246 /* Note that COND_EXPRs whose type is a structure or union
8247 are required to be constructed to contain assignments of
8248 a temporary variable, so that we can evaluate them here
8249 for side effect only. If type is void, we must do likewise. */
8251 /* If an arm of the branch requires a cleanup,
8252 only that cleanup is performed. */
8255 tree binary_op
= 0, unary_op
= 0;
8257 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8258 convert it to our mode, if necessary. */
8259 if (integer_onep (TREE_OPERAND (exp
, 1))
8260 && integer_zerop (TREE_OPERAND (exp
, 2))
8261 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8265 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8270 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, ro_modifier
);
8271 if (GET_MODE (op0
) == mode
)
8275 target
= gen_reg_rtx (mode
);
8276 convert_move (target
, op0
, unsignedp
);
8280 /* Check for X ? A + B : A. If we have this, we can copy A to the
8281 output and conditionally add B. Similarly for unary operations.
8282 Don't do this if X has side-effects because those side effects
8283 might affect A or B and the "?" operation is a sequence point in
8284 ANSI. (operand_equal_p tests for side effects.) */
8286 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8287 && operand_equal_p (TREE_OPERAND (exp
, 2),
8288 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8289 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8290 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8291 && operand_equal_p (TREE_OPERAND (exp
, 1),
8292 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8293 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8294 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8295 && operand_equal_p (TREE_OPERAND (exp
, 2),
8296 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8297 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8298 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8299 && operand_equal_p (TREE_OPERAND (exp
, 1),
8300 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8301 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8303 /* If we are not to produce a result, we have no target. Otherwise,
8304 if a target was specified use it; it will not be used as an
8305 intermediate target unless it is safe. If no target, use a
8310 else if (original_target
8311 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8312 || (singleton
&& GET_CODE (original_target
) == REG
8313 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8314 && original_target
== var_rtx (singleton
)))
8315 && GET_MODE (original_target
) == mode
8316 #ifdef HAVE_conditional_move
8317 && (! can_conditionally_move_p (mode
)
8318 || GET_CODE (original_target
) == REG
8319 || TREE_ADDRESSABLE (type
))
8321 && (GET_CODE (original_target
) != MEM
8322 || TREE_ADDRESSABLE (type
)))
8323 temp
= original_target
;
8324 else if (TREE_ADDRESSABLE (type
))
8327 temp
= assign_temp (type
, 0, 0, 1);
8329 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8330 do the test of X as a store-flag operation, do this as
8331 A + ((X != 0) << log C). Similarly for other simple binary
8332 operators. Only do for C == 1 if BRANCH_COST is low. */
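	  /* For instance, with a sufficiently high BRANCH_COST,
	     r = x ? a + 4 : a  can be emitted roughly as
	     r = a + ((x != 0) << 2), replacing the conditional branch with
	     a store-flag and a shift.  */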
8333 if (temp
&& singleton
&& binary_op
8334 && (TREE_CODE (binary_op
) == PLUS_EXPR
8335 || TREE_CODE (binary_op
) == MINUS_EXPR
8336 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8337 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8338 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8339 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8340 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8343 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8344 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8345 ? addv_optab
: add_optab
)
8346 : TREE_CODE (binary_op
) == MINUS_EXPR
8347 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8348 ? subv_optab
: sub_optab
)
8349 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8352 /* If we had X ? A : A + 1, do this as A + (X == 0).
8354 We have to invert the truth value here and then put it
8355 back later if do_store_flag fails. We cannot simply copy
8356 TREE_OPERAND (exp, 0) to another variable and modify that
8357 because invert_truthvalue can modify the tree pointed to
8359 if (singleton
== TREE_OPERAND (exp
, 1))
8360 TREE_OPERAND (exp
, 0)
8361 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8363 result
= do_store_flag (TREE_OPERAND (exp
, 0),
8364 (safe_from_p (temp
, singleton
, 1)
8366 mode
, BRANCH_COST
<= 1);
8368 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8369 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8370 build_int_2 (tree_log2
8374 (safe_from_p (temp
, singleton
, 1)
8375 ? temp
: NULL_RTX
), 0);
8379 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8380 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8381 unsignedp
, OPTAB_LIB_WIDEN
);
8383 else if (singleton
== TREE_OPERAND (exp
, 1))
8384 TREE_OPERAND (exp
, 0)
8385 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8388 do_pending_stack_adjust ();
8390 op0
= gen_label_rtx ();
8392 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8396 /* If the target conflicts with the other operand of the
8397 binary op, we can't use it. Also, we can't use the target
8398 if it is a hard register, because evaluating the condition
8399 might clobber it. */
8401 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8402 || (GET_CODE (temp
) == REG
8403 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8404 temp
= gen_reg_rtx (mode
);
8405 store_expr (singleton
, temp
, 0);
8408 expand_expr (singleton
,
8409 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8410 if (singleton
== TREE_OPERAND (exp
, 1))
8411 jumpif (TREE_OPERAND (exp
, 0), op0
);
8413 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8415 start_cleanup_deferral ();
8416 if (binary_op
&& temp
== 0)
8417 /* Just touch the other operand. */
8418 expand_expr (TREE_OPERAND (binary_op
, 1),
8419 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8421 store_expr (build (TREE_CODE (binary_op
), type
,
8422 make_tree (type
, temp
),
8423 TREE_OPERAND (binary_op
, 1)),
8426 store_expr (build1 (TREE_CODE (unary_op
), type
,
8427 make_tree (type
, temp
)),
8431 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8432 comparison operator. If we have one of these cases, set the
8433 output to A, branch on A (cse will merge these two references),
8434 then set the output to FOO. */
8436 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8437 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8438 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8439 TREE_OPERAND (exp
, 1), 0)
8440 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8441 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8442 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8444 if (GET_CODE (temp
) == REG
8445 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8446 temp
= gen_reg_rtx (mode
);
8447 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8448 jumpif (TREE_OPERAND (exp
, 0), op0
);
8450 start_cleanup_deferral ();
8451 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8455 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8456 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8457 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8458 TREE_OPERAND (exp
, 2), 0)
8459 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8460 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8461 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8463 if (GET_CODE (temp
) == REG
8464 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8465 temp
= gen_reg_rtx (mode
);
8466 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8467 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8469 start_cleanup_deferral ();
8470 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8475 op1
= gen_label_rtx ();
8476 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8478 start_cleanup_deferral ();
8480 /* One branch of the cond can be void, if it never returns. For
8481 example A ? throw : E */
8483 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8484 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8486 expand_expr (TREE_OPERAND (exp
, 1),
8487 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8488 end_cleanup_deferral ();
8490 emit_jump_insn (gen_jump (op1
));
8493 start_cleanup_deferral ();
8495 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8496 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8498 expand_expr (TREE_OPERAND (exp
, 2),
8499 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8502 end_cleanup_deferral ();
8513 /* Something needs to be initialized, but we didn't know
8514 where that thing was when building the tree. For example,
8515 it could be the return value of a function, or a parameter
8516 to a function which lays down in the stack, or a temporary
8517 variable which must be passed by reference.
8519 We guarantee that the expression will either be constructed
8520 or copied into our original target. */
8522 tree slot
= TREE_OPERAND (exp
, 0);
8523 tree cleanups
= NULL_TREE
;
8526 if (TREE_CODE (slot
) != VAR_DECL
)
8530 target
= original_target
;
8532 /* Set this here so that if we get a target that refers to a
8533 register variable that's already been used, put_reg_into_stack
8534 knows that it should fix up those uses. */
8535 TREE_USED (slot
) = 1;
8539 if (DECL_RTL_SET_P (slot
))
8541 target
= DECL_RTL (slot
);
8542 /* If we have already expanded the slot, so don't do
8544 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8549 target
= assign_temp (type
, 2, 0, 1);
8550 /* All temp slots at this level must not conflict. */
8551 preserve_temp_slots (target
);
8552 SET_DECL_RTL (slot
, target
);
8553 if (TREE_ADDRESSABLE (slot
))
8554 put_var_into_stack (slot
);
8556 /* Since SLOT is not known to the called function
8557 to belong to its stack frame, we must build an explicit
8558 cleanup. This case occurs when we must build up a reference
8559 to pass the reference as an argument. In this case,
8560 it is very likely that such a reference need not be
8563 if (TREE_OPERAND (exp
, 2) == 0)
8564 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
8565 cleanups
= TREE_OPERAND (exp
, 2);
8570 /* This case does occur, when expanding a parameter which
8571 needs to be constructed on the stack. The target
8572 is the actual stack address that we want to initialize.
8573 The function we call will perform the cleanup in this case. */
8575 /* If we have already assigned it space, use that space,
8576 not target that we were passed in, as our target
8577 parameter is only a hint. */
8578 if (DECL_RTL_SET_P (slot
))
8580 target
= DECL_RTL (slot
);
8581 /* If we have already expanded the slot, so don't do
8583 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8588 SET_DECL_RTL (slot
, target
);
8589 /* If we must have an addressable slot, then make sure that
8590 the RTL that we just stored in slot is OK. */
8591 if (TREE_ADDRESSABLE (slot
))
8592 put_var_into_stack (slot
);
8596 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8597 /* Mark it as expanded. */
8598 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8600 store_expr (exp1
, target
, 0);
8602 expand_decl_cleanup (NULL_TREE
, cleanups
);
8609 tree lhs
= TREE_OPERAND (exp
, 0);
8610 tree rhs
= TREE_OPERAND (exp
, 1);
8612 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8618 /* If lhs is complex, expand calls in rhs before computing it.
8619 That's so we don't compute a pointer and save it over a
8620 call. If lhs is simple, compute it first so we can give it
8621 as a target if the rhs is just a call. This avoids an
8622 extra temp and copy and that prevents a partial-subsumption
8623 which makes bad code. Actually we could treat
8624 component_ref's of vars like vars. */
8626 tree lhs
= TREE_OPERAND (exp
, 0);
8627 tree rhs
= TREE_OPERAND (exp
, 1);
8631 /* Check for |= or &= of a bitfield of size one into another bitfield
8632 of size 1. In this case, (unless we need the result of the
8633 assignment) we can do this more efficiently with a
8634 test followed by an assignment, if necessary.
8636 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8637 things change so we do, this code should be enhanced to
8640 && TREE_CODE (lhs
) == COMPONENT_REF
8641 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8642 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8643 && TREE_OPERAND (rhs
, 0) == lhs
8644 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8645 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8646 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8648 rtx label
= gen_label_rtx ();
8650 do_jump (TREE_OPERAND (rhs
, 1),
8651 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8652 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8653 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8654 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8656 : integer_zero_node
)),
8658 do_pending_stack_adjust ();
8663 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8669 if (!TREE_OPERAND (exp
, 0))
8670 expand_null_return ();
8672 expand_return (TREE_OPERAND (exp
, 0));
8675 case PREINCREMENT_EXPR
:
8676 case PREDECREMENT_EXPR
:
8677 return expand_increment (exp
, 0, ignore
);
8679 case POSTINCREMENT_EXPR
:
8680 case POSTDECREMENT_EXPR
:
8681 /* Faster to treat as pre-increment if result is not used. */
8682 return expand_increment (exp
, ! ignore
, ignore
);
8685 /* Are we taking the address of a nested function? */
8686 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8687 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8688 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8689 && ! TREE_STATIC (exp
))
8691 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8692 op0
= force_operand (op0
, target
);
8694 /* If we are taking the address of something erroneous, just
8696 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8698 /* If we are taking the address of a constant and are at the
8699 top level, we have to use output_constant_def since we can't
8700 call force_const_mem at top level. */
8702 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8703 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8705 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8708 /* We make sure to pass const0_rtx down if we came in with
8709 ignore set, to avoid doing the cleanups twice for something. */
8710 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8711 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8712 (modifier
== EXPAND_INITIALIZER
8713 ? modifier
: EXPAND_CONST_ADDRESS
));
8715 /* If we are going to ignore the result, OP0 will have been set
8716 to const0_rtx, so just return it. Don't get confused and
8717 think we are taking the address of the constant. */
8721 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8722 clever and returns a REG when given a MEM. */
8723 op0
= protect_from_queue (op0
, 1);
8725 /* We would like the object in memory. If it is a constant, we can
8726 have it be statically allocated into memory. For a non-constant,
8727 we need to allocate some memory and store the value into it. */
8729 if (CONSTANT_P (op0
))
8730 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8732 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8733 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8734 || GET_CODE (op0
) == PARALLEL
)
8736 /* If this object is in a register, it must can't be BLKmode. */
8737 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8738 tree nt
= build_qualified_type (inner_type
,
8739 (TYPE_QUALS (inner_type
)
8740 | TYPE_QUAL_CONST
));
8741 rtx memloc
= assign_temp (nt
, 1, 1, 1);
8743 if (GET_CODE (op0
) == PARALLEL
)
8744 /* Handle calls that pass values in multiple non-contiguous
8745 locations. The Irix 6 ABI has examples of this. */
8746 emit_group_store (memloc
, op0
, int_size_in_bytes (inner_type
));
8748 emit_move_insn (memloc
, op0
);
8753 if (GET_CODE (op0
) != MEM
)
8756 mark_temp_addr_taken (op0
);
8757 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8759 op0
= XEXP (op0
, 0);
8760 #ifdef POINTERS_EXTEND_UNSIGNED
8761 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8762 && mode
== ptr_mode
)
8763 op0
= convert_memory_address (ptr_mode
, op0
);
8768 /* If OP0 is not aligned as least as much as the type requires, we
8769 need to make a temporary, copy OP0 to it, and take the address of
8770 the temporary. We want to use the alignment of the type, not of
8771 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8772 the test for BLKmode means that can't happen. The test for
8773 BLKmode is because we never make mis-aligned MEMs with
8776 We don't need to do this at all if the machine doesn't have
8777 strict alignment. */
8778 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
8779 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
8781 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
8783 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8785 = assign_stack_temp_for_type
8786 (TYPE_MODE (inner_type
),
8787 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
8788 : int_size_in_bytes (inner_type
),
8789 1, build_qualified_type (inner_type
,
8790 (TYPE_QUALS (inner_type
)
8791 | TYPE_QUAL_CONST
)));
8793 if (TYPE_ALIGN_OK (inner_type
))
8796 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)));
8800 op0
= force_operand (XEXP (op0
, 0), target
);
8803 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
8804 op0
= force_reg (Pmode
, op0
);
8806 if (GET_CODE (op0
) == REG
8807 && ! REG_USERVAR_P (op0
))
8808 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8810 #ifdef POINTERS_EXTEND_UNSIGNED
8811 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8812 && mode
== ptr_mode
)
8813 op0
= convert_memory_address (ptr_mode
, op0
);
8818 case ENTRY_VALUE_EXPR
:
8821 /* COMPLEX type for Extended Pascal & Fortran */
8824 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8827 /* Get the rtx code of the operands. */
8828 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8829 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8832 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8836 /* Move the real (op0) and imaginary (op1) parts to their location. */
8837 emit_move_insn (gen_realpart (mode
, target
), op0
);
8838 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8840 insns
= get_insns ();
8843 /* Complex construction should appear as a single unit. */
8844 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8845 each with a separate pseudo as destination.
8846 It's not correct for flow to treat them as a unit. */
8847 if (GET_CODE (target
) != CONCAT
)
8848 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8856 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8857 return gen_realpart (mode
, op0
);
8860 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8861 return gen_imagpart (mode
, op0
);
8865 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8869 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8872 target
= gen_reg_rtx (mode
);
8876 /* Store the realpart and the negated imagpart to target. */
8877 emit_move_insn (gen_realpart (partmode
, target
),
8878 gen_realpart (partmode
, op0
));
8880 imag_t
= gen_imagpart (partmode
, target
);
8881 temp
= expand_unop (partmode
,
8882 ! unsignedp
&& flag_trapv
8883 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8884 ? negv_optab
: neg_optab
,
8885 gen_imagpart (partmode
, op0
), imag_t
, 0);
8887 emit_move_insn (imag_t
, temp
);
8889 insns
= get_insns ();
8892 /* Conjugate should appear as a single unit
8893 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8894 each with a separate pseudo as destination.
8895 It's not correct for flow to treat them as a unit. */
8896 if (GET_CODE (target
) != CONCAT
)
8897 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8904 case TRY_CATCH_EXPR
:
8906 tree handler
= TREE_OPERAND (exp
, 1);
8908 expand_eh_region_start ();
8910 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8912 expand_eh_region_end_cleanup (handler
);
8917 case TRY_FINALLY_EXPR
:
8919 tree try_block
= TREE_OPERAND (exp
, 0);
8920 tree finally_block
= TREE_OPERAND (exp
, 1);
8921 rtx finally_label
= gen_label_rtx ();
8922 rtx done_label
= gen_label_rtx ();
8923 rtx return_link
= gen_reg_rtx (Pmode
);
8924 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8925 (tree
) finally_label
, (tree
) return_link
);
8926 TREE_SIDE_EFFECTS (cleanup
) = 1;
8928 /* Start a new binding layer that will keep track of all cleanup
8929 actions to be performed. */
8930 expand_start_bindings (2);
8932 target_temp_slot_level
= temp_slot_level
;
8934 expand_decl_cleanup (NULL_TREE
, cleanup
);
8935 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8937 preserve_temp_slots (op0
);
8938 expand_end_bindings (NULL_TREE
, 0, 0);
8939 emit_jump (done_label
);
8940 emit_label (finally_label
);
8941 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8942 emit_indirect_jump (return_link
);
8943 emit_label (done_label
);
8947 case GOTO_SUBROUTINE_EXPR
:
8949 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8950 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8951 rtx return_address
= gen_label_rtx ();
8952 emit_move_insn (return_link
,
8953 gen_rtx_LABEL_REF (Pmode
, return_address
));
8955 emit_label (return_address
);
8960 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8963 return get_exception_pointer (cfun
);
8966 /* Function descriptors are not valid except for as
8967 initialization constants, and should not be expanded. */
8971 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg1);
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg0);
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
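/* For example, given the argument `"abcdef" + 2' (a PLUS_EXPR of an
   ADDR_EXPR of a STRING_CST and a constant), string_constant returns the
   STRING_CST for "abcdef" and sets *PTR_OFFSET to 2; a plain `"abcdef"'
   argument returns the same STRING_CST with a zero offset.  */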
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
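/* For example, for `y = x++;' (POST nonzero) the value of the expression is
   the old value of `x', and the actual increment can often be queued and
   emitted later; for `y = ++x;' (POST zero) the incremented value itself is
   the result.  IGNORE nonzero means the caller does not use the result.  */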
static rtx
expand_increment (exp, post, ignore)
     tree exp;
     int post, ignore;
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;
9054 /* Stabilize any component ref that might need to be
9055 evaluated more than once below. */
9057 || TREE_CODE (incremented
) == BIT_FIELD_REF
9058 || (TREE_CODE (incremented
) == COMPONENT_REF
9059 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9060 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9061 incremented
= stabilize_reference (incremented
);
9062 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9063 ones into save exprs so that they don't accidentally get evaluated
9064 more than once by the code below. */
9065 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9066 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9067 incremented
= save_expr (incremented
);
9069 /* Compute the operands as RTX.
9070 Note whether OP0 is the actual lvalue or a copy of it:
9071 I believe it is a copy iff it is a register or subreg
9072 and insns were generated in computing it. */
9074 temp
= get_last_insn ();
9075 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_RW
);
9077 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9078 in place but instead must do sign- or zero-extension during assignment,
9079 so we copy it into a new register and let the code below use it as
9082 Note that we can safely modify this SUBREG since it is know not to be
9083 shared (it was made by the expand_expr call above). */
9085 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9088 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9092 else if (GET_CODE (op0
) == SUBREG
9093 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9095 /* We cannot increment this SUBREG in place. If we are
9096 post-incrementing, get a copy of the old value. Otherwise,
9097 just mark that we cannot increment in place. */
9099 op0
= copy_to_reg (op0
);
9104 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9105 && temp
!= get_last_insn ());
9106 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
9107 EXPAND_MEMORY_USE_BAD
);
9109 /* Decide whether incrementing or decrementing. */
9110 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9111 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9112 this_optab
= sub_optab
;
9114 /* Convert decrement by a constant into a negative increment. */
9115 if (this_optab
== sub_optab
9116 && GET_CODE (op1
) == CONST_INT
)
9118 op1
= GEN_INT (-INTVAL (op1
));
9119 this_optab
= add_optab
;
9122 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9123 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9125 /* For a preincrement, see if we can do this with a single instruction. */
9128 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9129 if (icode
!= (int) CODE_FOR_nothing
9130 /* Make sure that OP0 is valid for operands 0 and 1
9131 of the insn we want to queue. */
9132 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9133 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9134 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9138 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9139 then we cannot just increment OP0. We must therefore contrive to
9140 increment the original value. Then, for postincrement, we can return
9141 OP0 since it is a copy of the old value. For preincrement, expand here
9142 unless we can do it with a single insn.
9144 Likewise if storing directly into OP0 would clobber high bits
9145 we need to preserve (bad_subreg). */
9146 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9148 /* This is the easiest way to increment the value wherever it is.
9149 Problems with multiple evaluation of INCREMENTED are prevented
9150 because either (1) it is a component_ref or preincrement,
9151 in which case it was stabilized above, or (2) it is an array_ref
9152 with constant index in an array in a register, which is
9153 safe to reevaluate. */
9154 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9155 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9156 ? MINUS_EXPR
: PLUS_EXPR
),
9159 TREE_OPERAND (exp
, 1));
9161 while (TREE_CODE (incremented
) == NOP_EXPR
9162 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9164 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9165 incremented
= TREE_OPERAND (incremented
, 0);
9168 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9169 return post
? op0
: temp
;
9174 /* We have a true reference to the value in OP0.
9175 If there is an insn to add or subtract in this mode, queue it.
9176 Queueing the increment insn avoids the register shuffling
9177 that often results if we must increment now and first save
9178 the old value for subsequent use. */
9180 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9181 op0
= stabilize (op0
);
9184 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9185 if (icode
!= (int) CODE_FOR_nothing
9186 /* Make sure that OP0 is valid for operands 0 and 1
9187 of the insn we want to queue. */
9188 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9189 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9191 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9192 op1
= force_reg (mode
, op1
);
9194 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9196 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9198 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9199 ? force_reg (Pmode
, XEXP (op0
, 0))
9200 : copy_to_reg (XEXP (op0
, 0)));
9203 op0
= replace_equiv_address (op0
, addr
);
9204 temp
= force_reg (GET_MODE (op0
), op0
);
9205 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9206 op1
= force_reg (mode
, op1
);
9208 /* The increment queue is LIFO, thus we have to `queue'
9209 the instructions in reverse order. */
9210 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9211 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9216 /* Preincrement, or we can't increment with one simple insn. */
9218 /* Save a copy of the value before inc or dec, to return it later. */
9219 temp
= value
= copy_to_reg (op0
);
9221 /* Arrange to return the incremented value. */
9222 /* Copy the rtx because expand_binop will protect from the queue,
9223 and the results of that would be invalid for us to return
9224 if our caller does emit_queue before using our result. */
9225 temp
= copy_rtx (value
= op0
);
9227 /* Increment however we can. */
9228 op1
= expand_binop (mode
, this_optab
, value
, op1
,
9229 current_function_check_memory_usage
? NULL_RTX
: op0
,
9230 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9231 /* Make sure the value is stored into OP0. */
9233 emit_move_insn (op0
, op1
);
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
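/* For example, for `if (a && b) ...' do_jump emits a test of `a' that jumps
   straight to IF_FALSE_LABEL when `a' is zero, so `b' is evaluated only when
   `a' is nonzero; `||' can likewise jump directly to IF_TRUE_LABEL, and a
   comparison such as `x < y' is emitted as a compare-and-branch rather than
   first materializing a 0-or-1 value.  */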
void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
9333 #ifdef MAX_INTEGER_COMPUTATION_MODE
9334 check_max_integer_computation_mode (exp
);
9345 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9351 /* This is not true with #pragma weak */
9353 /* The address of something can never be zero. */
9355 emit_jump (if_true_label
);
9360 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9361 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9362 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
9363 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
9366 /* If we are narrowing the operand, we have to do the compare in the
9368 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9369 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9371 case NON_LVALUE_EXPR
:
9372 case REFERENCE_EXPR
:
9377 /* These cannot change zero->non-zero or vice versa. */
9378 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9381 case WITH_RECORD_EXPR
:
9382 /* Put the object on the placeholder list, recurse through our first
9383 operand, and pop the list. */
9384 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9386 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9387 placeholder_list
= TREE_CHAIN (placeholder_list
);
9391 /* This is never less insns than evaluating the PLUS_EXPR followed by
9392 a test and can be longer if the test is eliminated. */
9394 /* Reduce to minus. */
9395 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9396 TREE_OPERAND (exp
, 0),
9397 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9398 TREE_OPERAND (exp
, 1))));
9399 /* Process as MINUS. */
9403 /* Non-zero iff operands of minus differ. */
9404 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9405 TREE_OPERAND (exp
, 0),
9406 TREE_OPERAND (exp
, 1)),
9407 NE
, NE
, if_false_label
, if_true_label
);
9411 /* If we are AND'ing with a small constant, do this comparison in the
9412 smallest type that fits. If the machine doesn't have comparisons
9413 that small, it will be converted back to the wider comparison.
9414 This helps if we are testing the sign bit of a narrower object.
9415 combine can't do this for us because it can't know whether a
9416 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
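      /* For instance, a test of `x & 0x80' depends only on the low byte of
         `x', so when the target has QImode compares it can be done as a
         QImode comparison, where 0x80 is the sign bit of the narrow mode.  */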
9418 if (! SLOW_BYTE_ACCESS
9419 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9420 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9421 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9422 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9423 && (type
= type_for_mode (mode
, 1)) != 0
9424 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9425 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9426 != CODE_FOR_nothing
))
9428 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9433 case TRUTH_NOT_EXPR
:
9434 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9437 case TRUTH_ANDIF_EXPR
:
9438 if (if_false_label
== 0)
9439 if_false_label
= drop_through_label
= gen_label_rtx ();
9440 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9441 start_cleanup_deferral ();
9442 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9443 end_cleanup_deferral ();
9446 case TRUTH_ORIF_EXPR
:
9447 if (if_true_label
== 0)
9448 if_true_label
= drop_through_label
= gen_label_rtx ();
9449 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9450 start_cleanup_deferral ();
9451 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9452 end_cleanup_deferral ();
9457 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9458 preserve_temp_slots (NULL_RTX
);
9462 do_pending_stack_adjust ();
9463 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9469 case ARRAY_RANGE_REF
:
9471 HOST_WIDE_INT bitsize
, bitpos
;
9473 enum machine_mode mode
;
9478 /* Get description of this reference. We don't actually care
9479 about the underlying object here. */
9480 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9481 &unsignedp
, &volatilep
);
9483 type
= type_for_size (bitsize
, unsignedp
);
9484 if (! SLOW_BYTE_ACCESS
9485 && type
!= 0 && bitsize
>= 0
9486 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9487 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9488 != CODE_FOR_nothing
))
9490 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9497 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9498 if (integer_onep (TREE_OPERAND (exp
, 1))
9499 && integer_zerop (TREE_OPERAND (exp
, 2)))
9500 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9502 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9503 && integer_onep (TREE_OPERAND (exp
, 2)))
9504 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9508 rtx label1
= gen_label_rtx ();
9509 drop_through_label
= gen_label_rtx ();
9511 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9513 start_cleanup_deferral ();
9514 /* Now the THEN-expression. */
9515 do_jump (TREE_OPERAND (exp
, 1),
9516 if_false_label
? if_false_label
: drop_through_label
,
9517 if_true_label
? if_true_label
: drop_through_label
);
9518 /* In case the do_jump just above never jumps. */
9519 do_pending_stack_adjust ();
9520 emit_label (label1
);
9522 /* Now the ELSE-expression. */
9523 do_jump (TREE_OPERAND (exp
, 2),
9524 if_false_label
? if_false_label
: drop_through_label
,
9525 if_true_label
? if_true_label
: drop_through_label
);
9526 end_cleanup_deferral ();
9532 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9534 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9535 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9537 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9538 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9541 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9542 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9543 fold (build1 (REALPART_EXPR
,
9544 TREE_TYPE (inner_type
),
9546 fold (build1 (REALPART_EXPR
,
9547 TREE_TYPE (inner_type
),
9549 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9550 fold (build1 (IMAGPART_EXPR
,
9551 TREE_TYPE (inner_type
),
9553 fold (build1 (IMAGPART_EXPR
,
9554 TREE_TYPE (inner_type
),
9556 if_false_label
, if_true_label
);
9559 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9560 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9562 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9563 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9564 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9566 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9572 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9574 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9575 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9577 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9578 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9581 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9582 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9583 fold (build1 (REALPART_EXPR
,
9584 TREE_TYPE (inner_type
),
9586 fold (build1 (REALPART_EXPR
,
9587 TREE_TYPE (inner_type
),
9589 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9590 fold (build1 (IMAGPART_EXPR
,
9591 TREE_TYPE (inner_type
),
9593 fold (build1 (IMAGPART_EXPR
,
9594 TREE_TYPE (inner_type
),
9596 if_false_label
, if_true_label
);
9599 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9600 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9602 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9603 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9604 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9606 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
9611 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9612 if (GET_MODE_CLASS (mode
) == MODE_INT
9613 && ! can_compare_p (LT
, mode
, ccp_jump
))
9614 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9616 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
9620 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9621 if (GET_MODE_CLASS (mode
) == MODE_INT
9622 && ! can_compare_p (LE
, mode
, ccp_jump
))
9623 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9625 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9629 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9630 if (GET_MODE_CLASS (mode
) == MODE_INT
9631 && ! can_compare_p (GT
, mode
, ccp_jump
))
9632 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9634 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9638 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9639 if (GET_MODE_CLASS (mode
) == MODE_INT
9640 && ! can_compare_p (GE
, mode
, ccp_jump
))
9641 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9643 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9646 case UNORDERED_EXPR
:
9649 enum rtx_code cmp
, rcmp
;
9652 if (code
== UNORDERED_EXPR
)
9653 cmp
= UNORDERED
, rcmp
= ORDERED
;
9655 cmp
= ORDERED
, rcmp
= UNORDERED
;
9656 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9659 if (! can_compare_p (cmp
, mode
, ccp_jump
)
9660 && (can_compare_p (rcmp
, mode
, ccp_jump
)
9661 /* If the target doesn't provide either UNORDERED or ORDERED
9662 comparisons, canonicalize on UNORDERED for the library. */
9663 || rcmp
== UNORDERED
))
9667 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
9669 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
9674 enum rtx_code rcode1
;
9675 enum tree_code tcode2
;
9699 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9700 if (can_compare_p (rcode1
, mode
, ccp_jump
))
9701 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
9705 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
9706 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
9709 /* If the target doesn't support combined unordered
9710 compares, decompose into UNORDERED + comparison. */
9711 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
9712 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
9713 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
9714 do_jump (exp
, if_false_label
, if_true_label
);
9720 __builtin_expect (<test>, 0) and
9721 __builtin_expect (<test>, 1)
9723 We need to do this here, so that <test> is not converted to a SCC
9724 operation on machines that use condition code registers and COMPARE
9725 like the PowerPC, and then the jump is done based on whether the SCC
9726 operation produced a 1 or 0. */
9728 /* Check for a built-in function. */
9729 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
9731 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
9732 tree arglist
= TREE_OPERAND (exp
, 1);
9734 if (TREE_CODE (fndecl
) == FUNCTION_DECL
9735 && DECL_BUILT_IN (fndecl
)
9736 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
9737 && arglist
!= NULL_TREE
9738 && TREE_CHAIN (arglist
) != NULL_TREE
)
9740 rtx seq
= expand_builtin_expect_jump (exp
, if_false_label
,
9743 if (seq
!= NULL_RTX
)
9750 /* fall through and generate the normal code. */
9754 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
9756 /* This is not needed any more and causes poor code since it causes
9757 comparisons and tests from non-SI objects to have different code
9759 /* Copy to register to avoid generating bad insns by cse
9760 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9761 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9762 temp
= copy_to_reg (temp
);
9764 do_pending_stack_adjust ();
9765 /* Do any postincrements in the expression that was tested. */
9768 if (GET_CODE (temp
) == CONST_INT
9769 || (GET_CODE (temp
) == CONST_DOUBLE
&& GET_MODE (temp
) == VOIDmode
)
9770 || GET_CODE (temp
) == LABEL_REF
)
9772 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
9776 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
9777 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
9778 /* Note swapping the labels gives us not-equal. */
9779 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
9780 else if (GET_MODE (temp
) != VOIDmode
)
9781 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
9782 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9783 GET_MODE (temp
), NULL_RTX
,
9784 if_false_label
, if_true_label
);
9789 if (drop_through_label
)
9791 /* If do_jump produces code that might be jumped around,
9792 do any stack adjusts from that code, before the place
9793 where control merges in. */
9794 do_pending_stack_adjust ();
9795 emit_label (drop_through_label
);
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                              if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
9921 part
= gen_reg_rtx (word_mode
);
9922 emit_move_insn (part
, operand_subword_force (op0
, 0, GET_MODE (op0
)));
9923 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
9924 part
= expand_binop (word_mode
, ior_optab
, part
,
9925 operand_subword_force (op0
, i
, GET_MODE (op0
)),
9926 part
, 1, OPTAB_WIDEN
);
9930 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
9931 NULL_RTX
, if_false_label
, if_true_label
);
9936 /* If we couldn't do the "or" simply, do this with a series of compares. */
9937 if (! if_false_label
)
9938 drop_through_label
= if_false_label
= gen_label_rtx ();
9940 for (i
= 0; i
< nwords
; i
++)
9941 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, GET_MODE (op0
)),
9942 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
,
9943 if_false_label
, NULL_RTX
);
9946 emit_jump (if_true_label
);
9948 if (drop_through_label
)
9949 emit_label (drop_through_label
);
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
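
/* The value returned above is a condition rtx such as

	(gt (cc0) (const_int 0))

   describing the comparison just emitted; callers typically test it or
   hand it to a conditional-branch generator, or receive a plain
   const_int when both operands were constants and the test folded.  */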
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.

   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
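
  /* For instance, a signed `x < 1' is rewritten below as `x <= 0',
     a signed `x > -1' as `x >= 0', and an unsigned `x >= 1' as `x > 0';
     the rewritten forms compare against zero, which the single-bit test
     and the special cases further down can then recognize.  */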
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
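
  /* For example, `(x & 8) != 0' is expanded as `(x >> 3) & 1', and
     `(x & 8) == 0' as `((x >> 3) ^ 1) & 1'; when the bit tested is the
     sign bit the trailing AND can be omitted altogether.  */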
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (type_for_size (index_bits, 0),
				index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
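
  /* In other words, the single unsigned test

	(unsigned) (index - minval) > (unsigned) (maxval - minval)

     rejects both index < minval (which wraps around to a huge unsigned
     value) and index > maxval, so one branch covers both bounds.  */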
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
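
/* The address computed above is, in effect,

	&table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   i.e. the INDEXth entry of the dispatch table; that entry is then
   loaded and used as the jump target (after PIC adjustment when the
   machine requires it).  */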
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);