/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
   Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#ifndef ACCUMULATE_OUTGOING_ARGS
#define ACCUMULATE_OUTGOING_ARGS 0
#endif

/* Supply a default definition for PUSH_ARGS.  */
#ifndef PUSH_ARGS
#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
#endif
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
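/* Illustrative example (not from the original sources): for a pointer
   variable living in a stack slot, a dereference *p could be emitted
   directly with an indirect address,

       (set (reg 101) (mem (mem (plus (reg fp) (const_int -4)))))

   but while cse is still expected we instead load the address into a
   register first,

       (set (reg 100) (mem (plus (reg fp) (const_int -4))))
       (set (reg 101) (mem (reg 100)))

   so that a useful common subexpression is exposed; if the address is
   used only once, instruction combination rebuilds the indirect form.  */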
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;
/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
extern struct obstack permanent_obstack;
static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void clear_by_pieces_1	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct clear_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					      HOST_WIDE_INT, enum machine_mode,
					      tree, tree, unsigned int, int,
					      int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static int readonly_fields_p	PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void preexpand_calls	PARAMS ((tree));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
#endif
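/* Worked example (illustrative, not from the original sources): on a
   hypothetical 32-bit target with word-aligned operands, copying a
   16-byte structure needs four SImode moves, so
   move_by_pieces_ninsns (16, 32) == 4.  With the default MOVE_RATIO of
   15 this is below the threshold and MOVE_BY_PIECES_P returns nonzero,
   so emit_block_move uses individual moves; with -Os the ratio drops to
   3 and the same copy falls back to a movstr pattern or a memcpy
   libcall instead.  */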
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */
static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
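/* Illustrative sketch (not from the original sources): expanding a use
   of `x++' in an address typically does something like

       rtx q = enqueue_insn (x_rtx, gen_add2_insn (x_rtx, const1_rtx));
       ...
       rtx val = protect_from_queue (q, 0);     -- pre-increment value
       ...
       emit_queue ();                           -- increment really emitted

   protect_from_queue yields X itself if the queued increment has not
   been emitted yet, or a copy of the old value of X if it has.  */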
381 protect_from_queue (x
, modify
)
385 register RTX_CODE code
= GET_CODE (x
);
387 #if 0 /* A QUEUED can hang around after the queue is forced out. */
388 /* Shortcut for most common case. */
389 if (pending_chain
== 0)
395 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
396 use of autoincrement. Make a copy of the contents of the memory
397 location rather than a copy of the address, but not if the value is
398 of mode BLKmode. Don't modify X in place since it might be
400 if (code
== MEM
&& GET_MODE (x
) != BLKmode
401 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
403 register rtx y
= XEXP (x
, 0);
404 register rtx
new = gen_rtx_MEM (GET_MODE (x
), QUEUED_VAR (y
));
406 MEM_COPY_ATTRIBUTES (new, x
);
410 register rtx temp
= gen_reg_rtx (GET_MODE (new));
411 emit_insn_before (gen_move_insn (temp
, new),
417 /* Otherwise, recursively protect the subexpressions of all
418 the kinds of rtx's that can contain a QUEUED. */
421 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
422 if (tem
!= XEXP (x
, 0))
428 else if (code
== PLUS
|| code
== MULT
)
430 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
431 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
432 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
441 /* If the increment has not happened, use the variable itself. */
442 if (QUEUED_INSN (x
) == 0)
443 return QUEUED_VAR (x
);
444 /* If the increment has happened and a pre-increment copy exists,
446 if (QUEUED_COPY (x
) != 0)
447 return QUEUED_COPY (x
);
448 /* The increment has happened but we haven't set up a pre-increment copy.
449 Set one up now, and use it. */
450 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
451 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
453 return QUEUED_COPY (x
);
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */
465 register enum rtx_code code
= GET_CODE (x
);
471 return queued_subexp_p (XEXP (x
, 0));
475 return (queued_subexp_p (XEXP (x
, 0))
476 || queued_subexp_p (XEXP (x
, 1)));
482 /* Perform all the pending incrementations. */
488 while ((p
= pending_chain
))
490 rtx body
= QUEUED_BODY (p
);
492 if (GET_CODE (body
) == SEQUENCE
)
494 QUEUED_INSN (p
) = XVECEXP (QUEUED_BODY (p
), 0, 0);
495 emit_insn (QUEUED_BODY (p
));
498 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
499 pending_chain
= QUEUED_NEXT (p
);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
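/* For illustration (not from the original sources): with a QImode
   register holding 0xFF, convert_move into an SImode register yields
   0x000000FF when UNSIGNEDP is nonzero (zero-extension) but
   0xFFFFFFFF, i.e. -1, when UNSIGNEDP is zero (sign-extension).  */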
509 convert_move (to
, from
, unsignedp
)
510 register rtx to
, from
;
513 enum machine_mode to_mode
= GET_MODE (to
);
514 enum machine_mode from_mode
= GET_MODE (from
);
515 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
516 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
520 /* rtx code for making an equivalent value. */
521 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
523 to
= protect_from_queue (to
, 1);
524 from
= protect_from_queue (from
, 0);
526 if (to_real
!= from_real
)
529 /* If FROM is a SUBREG that indicates that we have already done at least
530 the required extension, strip it. We don't handle such SUBREGs as
533 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
534 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
535 >= GET_MODE_SIZE (to_mode
))
536 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
537 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
539 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
542 if (to_mode
== from_mode
543 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
545 emit_move_insn (to
, from
);
549 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
551 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
554 if (VECTOR_MODE_P (to_mode
))
555 from
= gen_rtx_SUBREG (to_mode
, from
, 0);
557 to
= gen_rtx_SUBREG (from_mode
, to
, 0);
559 emit_move_insn (to
, from
);
563 if (to_real
!= from_real
)
570 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
572 /* Try converting directly if the insn is supported. */
573 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
576 emit_unop_insn (code
, to
, from
, UNKNOWN
);
581 #ifdef HAVE_trunchfqf2
582 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
584 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
588 #ifdef HAVE_trunctqfqf2
589 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
591 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
595 #ifdef HAVE_truncsfqf2
596 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
598 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
602 #ifdef HAVE_truncdfqf2
603 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
605 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
609 #ifdef HAVE_truncxfqf2
610 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
612 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
616 #ifdef HAVE_trunctfqf2
617 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
619 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
624 #ifdef HAVE_trunctqfhf2
625 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
627 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
631 #ifdef HAVE_truncsfhf2
632 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
634 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
638 #ifdef HAVE_truncdfhf2
639 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
641 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
645 #ifdef HAVE_truncxfhf2
646 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
648 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
652 #ifdef HAVE_trunctfhf2
653 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
655 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
660 #ifdef HAVE_truncsftqf2
661 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
663 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
667 #ifdef HAVE_truncdftqf2
668 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
670 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
674 #ifdef HAVE_truncxftqf2
675 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
677 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
681 #ifdef HAVE_trunctftqf2
682 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
684 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
689 #ifdef HAVE_truncdfsf2
690 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
692 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
696 #ifdef HAVE_truncxfsf2
697 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
699 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
703 #ifdef HAVE_trunctfsf2
704 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
706 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
710 #ifdef HAVE_truncxfdf2
711 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
713 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
717 #ifdef HAVE_trunctfdf2
718 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
720 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
732 libcall
= extendsfdf2_libfunc
;
736 libcall
= extendsfxf2_libfunc
;
740 libcall
= extendsftf2_libfunc
;
752 libcall
= truncdfsf2_libfunc
;
756 libcall
= extenddfxf2_libfunc
;
760 libcall
= extenddftf2_libfunc
;
772 libcall
= truncxfsf2_libfunc
;
776 libcall
= truncxfdf2_libfunc
;
788 libcall
= trunctfsf2_libfunc
;
792 libcall
= trunctfdf2_libfunc
;
804 if (libcall
== (rtx
) 0)
805 /* This conversion is not implemented yet. */
809 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
811 insns
= get_insns ();
813 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
818 /* Now both modes are integers. */
820 /* Handle expanding beyond a word. */
821 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
822 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
829 enum machine_mode lowpart_mode
;
830 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
832 /* Try converting directly if the insn is supported. */
833 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
836 /* If FROM is a SUBREG, put it into a register. Do this
837 so that we always generate the same set of insns for
838 better cse'ing; if an intermediate assignment occurred,
839 we won't be doing the operation directly on the SUBREG. */
840 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
841 from
= force_reg (from_mode
, from
);
842 emit_unop_insn (code
, to
, from
, equiv_code
);
845 /* Next, try converting via full word. */
846 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
847 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
848 != CODE_FOR_nothing
))
850 if (GET_CODE (to
) == REG
)
851 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
852 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
853 emit_unop_insn (code
, to
,
854 gen_lowpart (word_mode
, to
), equiv_code
);
858 /* No special multiword conversion insn; do it by hand. */
861 /* Since we will turn this into a no conflict block, we must ensure
862 that the source does not overlap the target. */
864 if (reg_overlap_mentioned_p (to
, from
))
865 from
= force_reg (from_mode
, from
);
867 /* Get a copy of FROM widened to a word, if necessary. */
868 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
869 lowpart_mode
= word_mode
;
871 lowpart_mode
= from_mode
;
873 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
875 lowpart
= gen_lowpart (lowpart_mode
, to
);
876 emit_move_insn (lowpart
, lowfrom
);
878 /* Compute the value to put in each remaining word. */
880 fill_value
= const0_rtx
;
885 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
886 && STORE_FLAG_VALUE
== -1)
888 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
890 fill_value
= gen_reg_rtx (word_mode
);
891 emit_insn (gen_slt (fill_value
));
897 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
898 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
900 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
904 /* Fill the remaining words. */
905 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
907 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
908 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
913 if (fill_value
!= subword
)
914 emit_move_insn (subword
, fill_value
);
917 insns
= get_insns ();
920 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
921 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
925 /* Truncating multi-word to a word or less. */
926 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
927 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
929 if (!((GET_CODE (from
) == MEM
930 && ! MEM_VOLATILE_P (from
)
931 && direct_load
[(int) to_mode
]
932 && ! mode_dependent_address_p (XEXP (from
, 0)))
933 || GET_CODE (from
) == REG
934 || GET_CODE (from
) == SUBREG
))
935 from
= force_reg (from_mode
, from
);
936 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
940 /* Handle pointer conversion. */ /* SPEE 900220. */
941 if (to_mode
== PQImode
)
943 if (from_mode
!= QImode
)
944 from
= convert_to_mode (QImode
, from
, unsignedp
);
946 #ifdef HAVE_truncqipqi2
947 if (HAVE_truncqipqi2
)
949 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
952 #endif /* HAVE_truncqipqi2 */
956 if (from_mode
== PQImode
)
958 if (to_mode
!= QImode
)
960 from
= convert_to_mode (QImode
, from
, unsignedp
);
965 #ifdef HAVE_extendpqiqi2
966 if (HAVE_extendpqiqi2
)
968 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
971 #endif /* HAVE_extendpqiqi2 */
976 if (to_mode
== PSImode
)
978 if (from_mode
!= SImode
)
979 from
= convert_to_mode (SImode
, from
, unsignedp
);
981 #ifdef HAVE_truncsipsi2
982 if (HAVE_truncsipsi2
)
984 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
987 #endif /* HAVE_truncsipsi2 */
991 if (from_mode
== PSImode
)
993 if (to_mode
!= SImode
)
995 from
= convert_to_mode (SImode
, from
, unsignedp
);
1000 #ifdef HAVE_extendpsisi2
1001 if (! unsignedp
&& HAVE_extendpsisi2
)
1003 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
1006 #endif /* HAVE_extendpsisi2 */
1007 #ifdef HAVE_zero_extendpsisi2
1008 if (unsignedp
&& HAVE_zero_extendpsisi2
)
1010 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
1013 #endif /* HAVE_zero_extendpsisi2 */
1018 if (to_mode
== PDImode
)
1020 if (from_mode
!= DImode
)
1021 from
= convert_to_mode (DImode
, from
, unsignedp
);
1023 #ifdef HAVE_truncdipdi2
1024 if (HAVE_truncdipdi2
)
1026 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1029 #endif /* HAVE_truncdipdi2 */
1033 if (from_mode
== PDImode
)
1035 if (to_mode
!= DImode
)
1037 from
= convert_to_mode (DImode
, from
, unsignedp
);
1042 #ifdef HAVE_extendpdidi2
1043 if (HAVE_extendpdidi2
)
1045 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1048 #endif /* HAVE_extendpdidi2 */
1053 /* Now follow all the conversions between integers
1054 no more than a word long. */
1056 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1057 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1058 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1059 GET_MODE_BITSIZE (from_mode
)))
1061 if (!((GET_CODE (from
) == MEM
1062 && ! MEM_VOLATILE_P (from
)
1063 && direct_load
[(int) to_mode
]
1064 && ! mode_dependent_address_p (XEXP (from
, 0)))
1065 || GET_CODE (from
) == REG
1066 || GET_CODE (from
) == SUBREG
))
1067 from
= force_reg (from_mode
, from
);
1068 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1069 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1070 from
= copy_to_reg (from
);
1071 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1075 /* Handle extension. */
1076 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1078 /* Convert directly if that works. */
1079 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1080 != CODE_FOR_nothing
)
1082 emit_unop_insn (code
, to
, from
, equiv_code
);
1087 enum machine_mode intermediate
;
1091 /* Search for a mode to convert via. */
1092 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1093 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1094 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1095 != CODE_FOR_nothing
)
1096 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1097 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1098 GET_MODE_BITSIZE (intermediate
))))
1099 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1100 != CODE_FOR_nothing
))
1102 convert_move (to
, convert_to_mode (intermediate
, from
,
1103 unsignedp
), unsignedp
);
1107 /* No suitable intermediate mode.
1108 Generate what we need with shifts. */
1109 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1110 - GET_MODE_BITSIZE (from_mode
), 0);
1111 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1112 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1114 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1117 emit_move_insn (to
, tmp
);
1122 /* Support special truncate insns for certain modes. */
1124 if (from_mode
== DImode
&& to_mode
== SImode
)
1126 #ifdef HAVE_truncdisi2
1127 if (HAVE_truncdisi2
)
1129 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1133 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1137 if (from_mode
== DImode
&& to_mode
== HImode
)
1139 #ifdef HAVE_truncdihi2
1140 if (HAVE_truncdihi2
)
1142 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1146 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1150 if (from_mode
== DImode
&& to_mode
== QImode
)
1152 #ifdef HAVE_truncdiqi2
1153 if (HAVE_truncdiqi2
)
1155 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1159 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1163 if (from_mode
== SImode
&& to_mode
== HImode
)
1165 #ifdef HAVE_truncsihi2
1166 if (HAVE_truncsihi2
)
1168 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1172 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1176 if (from_mode
== SImode
&& to_mode
== QImode
)
1178 #ifdef HAVE_truncsiqi2
1179 if (HAVE_truncsiqi2
)
1181 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1185 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1189 if (from_mode
== HImode
&& to_mode
== QImode
)
1191 #ifdef HAVE_trunchiqi2
1192 if (HAVE_trunchiqi2
)
1194 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1198 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1202 if (from_mode
== TImode
&& to_mode
== DImode
)
1204 #ifdef HAVE_trunctidi2
1205 if (HAVE_trunctidi2
)
1207 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1211 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1215 if (from_mode
== TImode
&& to_mode
== SImode
)
1217 #ifdef HAVE_trunctisi2
1218 if (HAVE_trunctisi2
)
1220 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1224 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1228 if (from_mode
== TImode
&& to_mode
== HImode
)
1230 #ifdef HAVE_trunctihi2
1231 if (HAVE_trunctihi2
)
1233 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1237 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1241 if (from_mode
== TImode
&& to_mode
== QImode
)
1243 #ifdef HAVE_trunctiqi2
1244 if (HAVE_trunctiqi2
)
1246 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1250 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1254 /* Handle truncation of volatile memrefs, and so on;
1255 the things that couldn't be truncated directly,
1256 and for which there was no special instruction. */
1257 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1259 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1260 emit_move_insn (to
, temp
);
1264 /* Mode combination is not recognized. */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
1279 convert_to_mode (mode
, x
, unsignedp
)
1280 enum machine_mode mode
;
1284 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
1301 convert_modes (mode
, oldmode
, x
, unsignedp
)
1302 enum machine_mode mode
, oldmode
;
1308 /* If FROM is a SUBREG that indicates that we have already done at least
1309 the required extension, strip it. */
1311 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1312 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1313 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1314 x
= gen_lowpart (mode
, x
);
1316 if (GET_MODE (x
) != VOIDmode
)
1317 oldmode
= GET_MODE (x
);
1319 if (mode
== oldmode
)
/* There is one case that we must handle specially: If we are converting
   a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
   we are to interpret the constant as unsigned, gen_lowpart will do
   the wrong thing if the constant appears negative.  What we want to do is
   make the high-order word of the constant zero, not all ones.  */
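/* Worked example (illustrative, not from the original sources): with
   HOST_BITS_PER_WIDE_INT == 32, converting (const_int -1) to an
   unsigned 64-bit mode must produce the value 0x00000000FFFFFFFF.
   gen_lowpart would sign-extend and give 0xFFFFFFFFFFFFFFFF, so the
   code below builds the result with immed_double_const, using the
   zero-extended low word and a zero high word.  */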
1328 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1329 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1330 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1332 HOST_WIDE_INT val
= INTVAL (x
);
1334 if (oldmode
!= VOIDmode
1335 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1337 int width
= GET_MODE_BITSIZE (oldmode
);
1339 /* We need to zero extend VAL. */
1340 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1343 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1346 /* We can do this with a gen_lowpart if both desired and current modes
1347 are integer, and this is either a constant integer, a register, or a
1348 non-volatile MEM. Except for the constant case where MODE is no
1349 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1351 if ((GET_CODE (x
) == CONST_INT
1352 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1353 || (GET_MODE_CLASS (mode
) == MODE_INT
1354 && GET_MODE_CLASS (oldmode
) == MODE_INT
1355 && (GET_CODE (x
) == CONST_DOUBLE
1356 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1357 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1358 && direct_load
[(int) mode
])
1359 || (GET_CODE (x
) == REG
1360 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1361 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1363 /* ?? If we don't know OLDMODE, we have to assume here that
1364 X does not need sign- or zero-extension. This may not be
1365 the case, but it's the best we can do. */
1366 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1367 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1369 HOST_WIDE_INT val
= INTVAL (x
);
1370 int width
= GET_MODE_BITSIZE (oldmode
);
1372 /* We must sign or zero-extend in this case. Start by
1373 zero-extending, then sign extend if we need to. */
1374 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1376 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1377 val
|= (HOST_WIDE_INT
) (-1) << width
;
1379 return GEN_INT (val
);
1382 return gen_lowpart (mode
, x
);
1385 temp
= gen_reg_rtx (mode
);
1386 convert_move (temp
, x
, unsignedp
);
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN is maximum alignment we can assume.  */
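/* Illustrative decomposition (not from the original sources): on a
   32-bit target with MOVE_MAX_PIECES == 4, a 7-byte copy with 32-bit
   alignment is emitted as one SImode move, one HImode move and one
   QImode move, working from the widest usable mode down to the
   narrowest, as the loop below does.  */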
1408 move_by_pieces (to
, from
, len
, align
)
1410 unsigned HOST_WIDE_INT len
;
1413 struct move_by_pieces data
;
1414 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
1415 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1416 enum machine_mode mode
= VOIDmode
, tmode
;
1417 enum insn_code icode
;
1420 data
.to_addr
= to_addr
;
1421 data
.from_addr
= from_addr
;
1425 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1426 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1428 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1429 || GET_CODE (from_addr
) == POST_INC
1430 || GET_CODE (from_addr
) == POST_DEC
);
1432 data
.explicit_inc_from
= 0;
1433 data
.explicit_inc_to
= 0;
1435 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1436 if (data
.reverse
) data
.offset
= len
;
1439 /* If copying requires more than two move insns,
1440 copy addresses to registers (to make displacements shorter)
1441 and use post-increment if available. */
1442 if (!(data
.autinc_from
&& data
.autinc_to
)
1443 && move_by_pieces_ninsns (len
, align
) > 2)
1445 /* Find the mode of the largest move... */
1446 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1447 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1448 if (GET_MODE_SIZE (tmode
) < max_size
)
1451 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1453 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1454 data
.autinc_from
= 1;
1455 data
.explicit_inc_from
= -1;
1457 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1459 data
.from_addr
= copy_addr_to_reg (from_addr
);
1460 data
.autinc_from
= 1;
1461 data
.explicit_inc_from
= 1;
1463 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1464 data
.from_addr
= copy_addr_to_reg (from_addr
);
1465 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1467 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1469 data
.explicit_inc_to
= -1;
1471 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1473 data
.to_addr
= copy_addr_to_reg (to_addr
);
1475 data
.explicit_inc_to
= 1;
1477 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1478 data
.to_addr
= copy_addr_to_reg (to_addr
);
1481 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1482 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1483 align
= MOVE_MAX
* BITS_PER_UNIT
;
1485 /* First move what we can in the largest integer mode, then go to
1486 successively smaller modes. */
1488 while (max_size
> 1)
1490 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1491 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1492 if (GET_MODE_SIZE (tmode
) < max_size
)
1495 if (mode
== VOIDmode
)
1498 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1499 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1500 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1502 max_size
= GET_MODE_SIZE (mode
);
1505 /* The code above should have handled everything. */
1510 /* Return number of insns required to move L bytes by pieces.
1511 ALIGN (in bytes) is maximum alignment we can assume. */
1513 static unsigned HOST_WIDE_INT
1514 move_by_pieces_ninsns (l
, align
)
1515 unsigned HOST_WIDE_INT l
;
1518 unsigned HOST_WIDE_INT n_insns
= 0;
1519 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1521 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1522 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1523 align
= MOVE_MAX
* BITS_PER_UNIT
;
1525 while (max_size
> 1)
1527 enum machine_mode mode
= VOIDmode
, tmode
;
1528 enum insn_code icode
;
1530 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1531 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1532 if (GET_MODE_SIZE (tmode
) < max_size
)
1535 if (mode
== VOIDmode
)
1538 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1539 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1540 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1542 max_size
= GET_MODE_SIZE (mode
);
1548 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1549 with move instructions for mode MODE. GENFUN is the gen_... function
1550 to make a move insn for that mode. DATA has all the other info. */
1553 move_by_pieces_1 (genfun
, mode
, data
)
1554 rtx (*genfun
) PARAMS ((rtx
, ...));
1555 enum machine_mode mode
;
1556 struct move_by_pieces
*data
;
1558 unsigned int size
= GET_MODE_SIZE (mode
);
1561 while (data
->len
>= size
)
1564 data
->offset
-= size
;
1566 if (data
->autinc_to
)
1568 to1
= gen_rtx_MEM (mode
, data
->to_addr
);
1569 MEM_COPY_ATTRIBUTES (to1
, data
->to
);
1572 to1
= change_address (data
->to
, mode
,
1573 plus_constant (data
->to_addr
, data
->offset
));
1575 if (data
->autinc_from
)
1577 from1
= gen_rtx_MEM (mode
, data
->from_addr
);
1578 MEM_COPY_ATTRIBUTES (from1
, data
->from
);
1581 from1
= change_address (data
->from
, mode
,
1582 plus_constant (data
->from_addr
, data
->offset
));
1584 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1585 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1586 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1587 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1589 emit_insn ((*genfun
) (to1
, from1
));
1591 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1592 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1593 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1594 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1596 if (! data
->reverse
)
1597 data
->offset
+= size
;
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
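/* Strategy sketch (illustrative, not from the original sources): for a
   known small SIZE such as (const_int 8) with word alignment,
   MOVE_BY_PIECES_P is true and the copy becomes two SImode moves; for a
   larger or unknown SIZE the movstr patterns are tried from the
   narrowest mode up; and only if no pattern matches does the code fall
   back to calling memcpy (or bcopy when TARGET_MEM_FUNCTIONS is not
   defined).  */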
1616 emit_block_move (x
, y
, size
, align
)
1622 #ifdef TARGET_MEM_FUNCTIONS
1624 tree call_expr
, arg_list
;
1627 if (GET_MODE (x
) != BLKmode
)
1630 if (GET_MODE (y
) != BLKmode
)
1633 x
= protect_from_queue (x
, 1);
1634 y
= protect_from_queue (y
, 0);
1635 size
= protect_from_queue (size
, 0);
1637 if (GET_CODE (x
) != MEM
)
1639 if (GET_CODE (y
) != MEM
)
1644 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1645 move_by_pieces (x
, y
, INTVAL (size
), align
);
1648 /* Try the most limited insn first, because there's no point
1649 including more than one in the machine description unless
1650 the more limited one has some advantage. */
1652 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1653 enum machine_mode mode
;
1655 /* Since this is a move insn, we don't care about volatility. */
1658 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1659 mode
= GET_MODE_WIDER_MODE (mode
))
1661 enum insn_code code
= movstr_optab
[(int) mode
];
1662 insn_operand_predicate_fn pred
;
1664 if (code
!= CODE_FOR_nothing
1665 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1666 here because if SIZE is less than the mode mask, as it is
1667 returned by the macro, it will definitely be less than the
1668 actual mode mask. */
1669 && ((GET_CODE (size
) == CONST_INT
1670 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1671 <= (GET_MODE_MASK (mode
) >> 1)))
1672 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1673 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1674 || (*pred
) (x
, BLKmode
))
1675 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1676 || (*pred
) (y
, BLKmode
))
1677 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1678 || (*pred
) (opalign
, VOIDmode
)))
1681 rtx last
= get_last_insn ();
1684 op2
= convert_to_mode (mode
, size
, 1);
1685 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1686 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1687 op2
= copy_to_mode_reg (mode
, op2
);
1689 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1697 delete_insns_since (last
);
1703 /* X, Y, or SIZE may have been passed through protect_from_queue.
1705 It is unsafe to save the value generated by protect_from_queue
1706 and reuse it later. Consider what happens if emit_queue is
1707 called before the return value from protect_from_queue is used.
1709 Expansion of the CALL_EXPR below will call emit_queue before
1710 we are finished emitting RTL for argument setup. So if we are
1711 not careful we could get the wrong value for an argument.
1713 To avoid this problem we go ahead and emit code to copy X, Y &
1714 SIZE into new pseudos. We can then place those new pseudos
1715 into an RTL_EXPR and use them later, even after a call to
1718 Note this is not strictly needed for library calls since they
1719 do not call emit_queue before loading their arguments. However,
1720 we may need to have library calls call emit_queue in the future
1721 since failing to do so could cause problems for targets which
1722 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1723 x
= copy_to_mode_reg (Pmode
, XEXP (x
, 0));
1724 y
= copy_to_mode_reg (Pmode
, XEXP (y
, 0));
1726 #ifdef TARGET_MEM_FUNCTIONS
1727 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
1729 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1730 TREE_UNSIGNED (integer_type_node
));
1731 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
1734 #ifdef TARGET_MEM_FUNCTIONS
1735 /* It is incorrect to use the libcall calling conventions to call
1736 memcpy in this context.
1738 This could be a user call to memcpy and the user may wish to
1739 examine the return value from memcpy.
1741 For targets where libcalls and normal calls have different conventions
1742 for returning pointers, we could end up generating incorrect code.
1744 So instead of using a libcall sequence we build up a suitable
1745 CALL_EXPR and expand the call in the normal fashion. */
1746 if (fn
== NULL_TREE
)
1750 /* This was copied from except.c, I don't know if all this is
1751 necessary in this context or not. */
1752 fn
= get_identifier ("memcpy");
1753 fntype
= build_pointer_type (void_type_node
);
1754 fntype
= build_function_type (fntype
, NULL_TREE
);
1755 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
1756 ggc_add_tree_root (&fn
, 1);
1757 DECL_EXTERNAL (fn
) = 1;
1758 TREE_PUBLIC (fn
) = 1;
1759 DECL_ARTIFICIAL (fn
) = 1;
1760 make_decl_rtl (fn
, NULL_PTR
, 1);
1761 assemble_external (fn
);
1764 /* We need to make an argument list for the function call.
1766 memcpy has three arguments, the first two are void * addresses and
1767 the last is a size_t byte count for the copy. */
1769 = build_tree_list (NULL_TREE
,
1770 make_tree (build_pointer_type (void_type_node
), x
));
1771 TREE_CHAIN (arg_list
)
1772 = build_tree_list (NULL_TREE
,
1773 make_tree (build_pointer_type (void_type_node
), y
));
1774 TREE_CHAIN (TREE_CHAIN (arg_list
))
1775 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
1776 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
1778 /* Now we have to build up the CALL_EXPR itself. */
1779 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1780 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1781 call_expr
, arg_list
, NULL_TREE
);
1782 TREE_SIDE_EFFECTS (call_expr
) = 1;
1784 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1786 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
1787 VOIDmode
, 3, y
, Pmode
, x
, Pmode
,
1788 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1789 TREE_UNSIGNED (integer_type_node
)),
1790 TYPE_MODE (integer_type_node
));
1797 /* Copy all or part of a value X into registers starting at REGNO.
1798 The number of registers to be filled is NREGS. */
1801 move_block_to_reg (regno
, x
, nregs
, mode
)
1805 enum machine_mode mode
;
1808 #ifdef HAVE_load_multiple
1816 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1817 x
= validize_mem (force_const_mem (mode
, x
));
1819 /* See if the machine can do this with a load multiple insn. */
1820 #ifdef HAVE_load_multiple
1821 if (HAVE_load_multiple
)
1823 last
= get_last_insn ();
1824 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1832 delete_insns_since (last
);
1836 for (i
= 0; i
< nregs
; i
++)
1837 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1838 operand_subword_force (x
, i
, mode
));
1841 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1842 The number of registers to be filled is NREGS. SIZE indicates the number
1843 of bytes in the object X. */
1846 move_block_from_reg (regno
, x
, nregs
, size
)
1853 #ifdef HAVE_store_multiple
1857 enum machine_mode mode
;
1859 /* If SIZE is that of a mode no bigger than a word, just use that
1860 mode's store operation. */
1861 if (size
<= UNITS_PER_WORD
1862 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
1864 emit_move_insn (change_address (x
, mode
, NULL
),
1865 gen_rtx_REG (mode
, regno
));
1869 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1870 to the left before storing to memory. Note that the previous test
1871 doesn't handle all cases (e.g. SIZE == 3). */
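/* Worked example (illustrative, not from the original sources): with
   UNITS_PER_WORD == 4 and SIZE == 3 on a big-endian target, the three
   useful bytes sit in the low-order part of the register, so the value
   is shifted left by (4 - 3) * 8 = 8 bits before the word is stored,
   leaving the bytes left-justified in memory as expected.  */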
1872 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1874 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1880 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1881 gen_rtx_REG (word_mode
, regno
),
1882 build_int_2 ((UNITS_PER_WORD
- size
)
1883 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1884 emit_move_insn (tem
, shift
);
1888 /* See if the machine can do this with a store multiple insn. */
1889 #ifdef HAVE_store_multiple
1890 if (HAVE_store_multiple
)
1892 last
= get_last_insn ();
1893 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1901 delete_insns_since (last
);
1905 for (i
= 0; i
< nregs
; i
++)
1907 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1912 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
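/* Illustrative PARALLEL (not from the original sources): a 12-byte
   structure passed in three word registers might be described as

       (parallel [(expr_list (reg:SI 3) (const_int 0))
                  (expr_list (reg:SI 4) (const_int 4))
                  (expr_list (reg:SI 5) (const_int 8))])

   where each entry pairs a destination register with the byte offset of
   the piece it receives; emit_group_load extracts each piece from SRC
   at that offset and moves it into the corresponding register.  */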
1928 emit_group_load (dst
, orig_src
, ssize
, align
)
1936 if (GET_CODE (dst
) != PARALLEL
)
1939 /* Check for a NULL entry, used to indicate that the parameter goes
1940 both on the stack and in registers. */
1941 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1946 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1948 /* If we won't be loading directly from memory, protect the real source
1949 from strange tricks we might play. */
1951 if (GET_CODE (src
) != MEM
&& ! CONSTANT_P (src
))
1953 if (GET_MODE (src
) == VOIDmode
)
1954 src
= gen_reg_rtx (GET_MODE (dst
));
1956 src
= gen_reg_rtx (GET_MODE (orig_src
));
1957 emit_move_insn (src
, orig_src
);
1960 /* Process the pieces. */
1961 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1963 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1964 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1965 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1968 /* Handle trailing fragments that run over the size of the struct. */
1969 if (ssize
>= 0 && bytepos
+ bytelen
> ssize
)
1971 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1972 bytelen
= ssize
- bytepos
;
1977 /* Optimize the access just a bit. */
1978 if (GET_CODE (src
) == MEM
1979 && align
>= GET_MODE_ALIGNMENT (mode
)
1980 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1981 && bytelen
== GET_MODE_SIZE (mode
))
1983 tmps
[i
] = gen_reg_rtx (mode
);
1984 emit_move_insn (tmps
[i
],
1985 change_address (src
, mode
,
1986 plus_constant (XEXP (src
, 0),
1989 else if (GET_CODE (src
) == CONCAT
)
1992 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0))))
1993 tmps
[i
] = XEXP (src
, 0);
1994 else if (bytepos
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
1995 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 1))))
1996 tmps
[i
] = XEXP (src
, 1);
2000 else if ((CONSTANT_P (src
)
2001 && (GET_MODE (src
) == VOIDmode
|| GET_MODE (src
) == mode
))
2002 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
2005 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2006 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2007 mode
, mode
, align
, ssize
);
2009 if (BYTES_BIG_ENDIAN
&& shift
)
2010 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2011 tmps
[i
], 0, OPTAB_WIDEN
);
2016 /* Copy the extracted pieces into the proper (probable) hard regs. */
2017 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2018 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
2021 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2022 registers represented by a PARALLEL. SSIZE represents the total size of
2023 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2026 emit_group_store (orig_dst
, src
, ssize
, align
)
2034 if (GET_CODE (src
) != PARALLEL
)
2037 /* Check for a NULL entry, used to indicate that the parameter goes
2038 both on the stack and in registers. */
2039 if (XEXP (XVECEXP (src
, 0, 0), 0))
2044 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2046 /* Copy the (probable) hard regs into pseudos. */
2047 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2049 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2050 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2051 emit_move_insn (tmps
[i
], reg
);
2055 /* If we won't be storing directly into memory, protect the real destination
2056 from strange tricks we might play. */
2058 if (GET_CODE (dst
) == PARALLEL
)
2062 /* We can get a PARALLEL dst if there is a conditional expression in
2063 a return statement. In that case, the dst and src are the same,
2064 so no action is necessary. */
2065 if (rtx_equal_p (dst
, src
))
2068 /* It is unclear if we can ever reach here, but we may as well handle
2069 it. Allocate a temporary, and split this into a store/load to/from
2072 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2073 emit_group_store (temp
, src
, ssize
, align
);
2074 emit_group_load (dst
, temp
, ssize
, align
);
2077 else if (GET_CODE (dst
) != MEM
)
2079 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2080 /* Make life a bit easier for combine. */
2081 emit_move_insn (dst
, const0_rtx
);
2084 /* Process the pieces. */
2085 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2087 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2088 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2089 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2091 /* Handle trailing fragments that run over the size of the struct. */
2092 if (ssize
>= 0 && bytepos
+ bytelen
> ssize
)
2094 if (BYTES_BIG_ENDIAN
)
2096 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2097 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2098 tmps
[i
], 0, OPTAB_WIDEN
);
2100 bytelen
= ssize
- bytepos
;
2103 /* Optimize the access just a bit. */
2104 if (GET_CODE (dst
) == MEM
2105 && align
>= GET_MODE_ALIGNMENT (mode
)
2106 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2107 && bytelen
== GET_MODE_SIZE (mode
))
2108 emit_move_insn (change_address (dst
, mode
,
2109 plus_constant (XEXP (dst
, 0),
2113 store_bit_field (dst
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2114 mode
, tmps
[i
], align
, ssize
);
2119 /* Copy from the pseudo into the (probable) hard reg. */
2120 if (GET_CODE (dst
) == REG
)
2121 emit_move_insn (orig_dst
, dst
);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */
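/* Worked example (illustrative, not from the original sources): a
   5-byte structure returned in registers on a 32-bit big-endian target
   occupies bytes 0..4 of memory, but the register contents are
   right-justified, so big_endian_correction below becomes
   32 - (5 % 4) * 8 = 24 bits and the per-word copy loop skips those
   empty high-order bits when extracting from the source registers.  */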
2134 copy_blkmode_from_reg (tgtblk
, srcreg
, type
)
2139 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2140 rtx src
= NULL
, dst
= NULL
;
2141 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2142 unsigned HOST_WIDE_INT bitpos
, xbitpos
, big_endian_correction
= 0;
2146 tgtblk
= assign_stack_temp (BLKmode
, bytes
, 0);
2147 MEM_SET_IN_STRUCT_P (tgtblk
, AGGREGATE_TYPE_P (type
));
2148 preserve_temp_slots (tgtblk
);
2151 /* This code assumes srcreg is at least a full word. If it isn't,
2152 copy it into a new pseudo which is a full word. */
2153 if (GET_MODE (srcreg
) != BLKmode
2154 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2155 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2157 /* Structures whose size is not a multiple of a word are aligned
2158 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2159 machine, this means we must skip the empty high order bytes when
2160 calculating the bit offset. */
2161 if (BYTES_BIG_ENDIAN
&& bytes
% UNITS_PER_WORD
)
2162 big_endian_correction
2163 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
/* Copy the structure BITSIZE bits at a time.

   We could probably emit more efficient code for machines which do not use
   strict alignment, but it doesn't seem worth the effort at the current
   time.  */
2170 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2171 bitpos
< bytes
* BITS_PER_UNIT
;
2172 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2174 /* We need a new source operand each time xbitpos is on a
2175 word boundary and when xbitpos == big_endian_correction
2176 (the first time through). */
2177 if (xbitpos
% BITS_PER_WORD
== 0
2178 || xbitpos
== big_endian_correction
)
2179 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
, BLKmode
);
2181 /* We need a new destination operand each time bitpos is on
2183 if (bitpos
% BITS_PER_WORD
== 0)
2184 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
/* Use xbitpos for the source extraction (right justified) and
   bitpos for the destination store (left justified).  */
2188 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2189 extract_bit_field (src
, bitsize
,
2190 xbitpos
% BITS_PER_WORD
, 1,
2191 NULL_RTX
, word_mode
, word_mode
,
2192 bitsize
, BITS_PER_WORD
),
2193 bitsize
, BITS_PER_WORD
);
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse)
    data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
	  && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}

      if ( !data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	{
	  to1 = gen_rtx_MEM (mode, data->to_addr);
	  MEM_COPY_ATTRIBUTES (to1, data->to);
	}
      else
	to1 = change_address (data->to, mode,
			      plus_constant (data->to_addr, data->offset));

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, const0_rtx));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
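
/* A sketch, for illustration only: because clear_by_pieces always retries
   from the widest integer mode that still fits, clearing a hypothetical
   6-byte block on a 32-bit target whose alignment permits word stores would
   typically come out as one SImode store of zero followed by one HImode
   store of zero, conceptually

	clear_by_pieces (to, 6, 32);   =>   (set (mem:SI ...) (const_int 0))
					    (set (mem:HI ...) (const_int 0))

   The exact insns depend on MOVE_MAX_PIECES and the target's move
   patterns.  */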
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes and ALIGN is the maximum alignment we can assume.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     unsigned int align;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && MOVE_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];
	      insn_operand_predicate_fn pred;

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= (GET_MODE_MASK (mode) >> 1)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		      || (*pred) (object, BLKmode))
		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
		      || (*pred) (opalign, VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  pred = insn_data[(int) code].operand[1].predicate;
		  if (pred != 0 && ! (*pred) (op1, mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

	  /* OBJECT or SIZE may have been passed through protect_from_queue.

	     It is unsafe to save the value generated by protect_from_queue
	     and reuse it later.  Consider what happens if emit_queue is
	     called before the return value from protect_from_queue is used.

	     Expansion of the CALL_EXPR below will call emit_queue before
	     we are finished emitting RTL for argument setup.  So if we are
	     not careful we could get the wrong value for an argument.

	     To avoid this problem we go ahead and emit code to copy OBJECT
	     and SIZE into new pseudos.  We can then place those new pseudos
	     into an RTL_EXPR and use them later, even after a call to
	     emit_queue.

	     Note this is not strictly needed for library calls since they
	     do not call emit_queue before loading their arguments.  However,
	     we may need to have library calls call emit_queue in the future
	     since failing to do so could cause problems for targets which
	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
				  TREE_UNSIGNED (integer_type_node));
	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
	  /* It is incorrect to use the libcall calling conventions to call
	     memset in this context.

	     This could be a user call to memset and the user may wish to
	     examine the return value from memset.

	     For targets where libcalls and normal calls have different
	     conventions for returning pointers, we could end up generating
	     incorrect code.

	     So instead of using a libcall sequence we build up a suitable
	     CALL_EXPR and expand the call in the normal fashion.  */
	  if (fn == NULL_TREE)
	    {
	      tree fntype;

	      /* This was copied from except.c, I don't know if all this is
		 necessary in this context or not.  */
	      fn = get_identifier ("memset");
	      fntype = build_pointer_type (void_type_node);
	      fntype = build_function_type (fntype, NULL_TREE);
	      fn = build_decl (FUNCTION_DECL, fn, fntype);
	      ggc_add_tree_root (&fn, 1);
	      DECL_EXTERNAL (fn) = 1;
	      TREE_PUBLIC (fn) = 1;
	      DECL_ARTIFICIAL (fn) = 1;
	      make_decl_rtl (fn, NULL_PTR, 1);
	      assemble_external (fn);
	    }

	  /* We need to make an argument list for the function call.

	     memset has three arguments, the first is a void * address, the
	     second an integer with the initialization value, and the last is
	     a size_t byte count for the copy.  */
	  arg_list
	    = build_tree_list (NULL_TREE,
			       make_tree (build_pointer_type (void_type_node),
					  object));
	  TREE_CHAIN (arg_list)
	    = build_tree_list (NULL_TREE,
			       make_tree (integer_type_node, const0_rtx));
	  TREE_CHAIN (TREE_CHAIN (arg_list))
	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

	  /* Now we have to build up the CALL_EXPR itself.  */
	  call_expr = build1 (ADDR_EXPR,
			      build_pointer_type (TREE_TYPE (fn)), fn);
	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			     call_expr, arg_list, NULL_TREE);
	  TREE_SIDE_EFFECTS (call_expr) = 1;

	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
	  emit_library_call (bzero_libfunc, LCT_NORMAL,
			     VOIDmode, 2, object, Pmode, size,
			     TYPE_MODE (integer_type_node));
#endif
	}
    }

  return retval;
}
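
/* A sketch, for illustration only: a typical use of clear_storage is zeroing
   a BLKmode object whose size is only known as an rtx, e.g. hypothetically

	clear_storage (object, GEN_INT (32), 64);

   which ends up in clear_by_pieces, a clrstr pattern, or the memset/bzero
   call above depending on the size, the alignment, and the target.  */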
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
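
/* A sketch, for illustration only: emit_move_insn only handles non-BLK
   modes, so a caller moving a word-sized constant would write, for example,

	emit_move_insn (gen_reg_rtx (SImode), const0_rtx);

   while block copies must go through emit_block_move instead.  */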
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  unsigned int i;

  if (mode >= MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  rtx realpart_x, realpart_y;
	  rtx imagpart_x, imagpart_y;

	  /* If this is a complex value with each part being smaller than a
	     word, the usual calling sequence will likely pack the pieces into
	     a single register.  Unfortunately, SUBREG of hard registers only
	     deals in terms of words, so we have a problem converting input
	     arguments to the CONCAT of two registers that is used elsewhere
	     for complex values.  If this is before reload, we can copy it into
	     memory and reload.  FIXME, we should see about using extract and
	     insert on integer registers, but complex short and complex char
	     variables should be rarely used.  */
	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
	      && (reload_in_progress | reload_completed) == 0)
	    {
	      int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
	      int packed_src_p  = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

	      if (packed_dest_p || packed_src_p)
		{
		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
					       ? MODE_FLOAT : MODE_INT);

		  enum machine_mode reg_mode =
		    mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

		  if (reg_mode != BLKmode)
		    {
		      rtx mem = assign_stack_temp (reg_mode,
						   GET_MODE_SIZE (mode), 0);

		      rtx cmem = change_address (mem, mode, NULL_RTX);

		      cfun->cannot_inline
			= N_("function using short complex types cannot be inline");

		      if (packed_dest_p)
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
			  emit_move_insn_1 (cmem, y);
			  return emit_move_insn_1 (sreg, mem);
			}
		      else
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
			  emit_move_insn_1 (mem, sreg);
			  return emit_move_insn_1 (x, cmem);
			}
		    }
		}
	    }

	  realpart_x = gen_realpart (submode, x);
	  realpart_y = gen_realpart (submode, y);
	  imagpart_x = gen_imagpart (submode, x);
	  imagpart_y = gen_imagpart (submode, y);

	  /* Show the output dies here.  This is necessary for SUBREGs
	     of pseudos since we cannot track their lifetimes correctly;
	     hard regs shouldn't appear here except as return values.
	     We never want to emit such a clobber after reload.  */
	  if (x != y
	      && ! (reload_in_progress || reload_completed)
	      && (GET_CODE (realpart_x) == SUBREG
		  || GET_CODE (imagpart_x) == SUBREG))
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
	    }

	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (realpart_x, realpart_y));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (imagpart_x, imagpart_y));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* If we are in reload, see if either operand is a MEM whose address
	 is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
	{
	  rtx new = gen_rtx_MEM (GET_MODE (x), inner);

	  MEM_COPY_ATTRIBUTES (new, x);
	  x = new;
	}
      if (reload_in_progress && GET_CODE (y) == MEM
	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
	{
	  rtx new = gen_rtx_MEM (GET_MODE (y), inner);

	  MEM_COPY_ATTRIBUTES (new, y);
	  y = new;
	}

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  need_clobber |= (GET_CODE (xpart) == SUBREG);

	  last_insn = emit_move_insn (xpart, ypart);
	}

      seq = gen_sequence ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
	 of pseudos since we cannot track their lifetimes correctly;
	 hard regs shouldn't appear here except as return values.
	 We never want to emit such a clobber after reload.  */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
	  && need_clobber != 0)
	{
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
	}

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
#ifdef ARGS_GROW_DOWNWARD
  if (!ACCUMULATE_OUTGOING_ARGS)
#else
  if (0)
#endif
#else
  if (1)
#endif
    {
      /* Return the lowest stack address when STACK or ARGS grow downward and
	 we are not accumulating outgoing arguments (the c4x port uses such
	 conventions).  */
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Return an rtx for the address of the beginning of a as-if-it-was-pushed
   block of SIZE bytes.  */

rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
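
/* A sketch, for illustration only: a hypothetical caller that needs 16 bytes
   of argument space and then wants to address it could write

	rtx block = push_block (GEN_INT (16), 0, 0);

   push_block adjusts the stack itself and hands back a memory address for
   the start of the newly allocated block.  */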
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all the other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far, reg_parm_stack_space,
		alignment_pad)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  stack_pointer_delta += INTVAL (size) - used;
	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);

	  if (current_function_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx temp;

	      in_check_memory_usage = 1;
	      temp = get_push_address (INTVAL (size) - used);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
				   Pmode, XEXP (xinner, 0), Pmode,
				   GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
				   Pmode, GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));
	  if (current_function_check_memory_usage && ! in_check_memory_usage)
	    {
	      in_check_memory_usage = 1;
	      target = copy_to_reg (temp);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				   target, Pmode,
				   XEXP (xinner, 0), Pmode,
				   size, TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				   target, Pmode,
				   size, TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  if (type != 0)
	    {
	      set_mem_attributes (target, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      MEM_ALIAS_SET (target) = 0;
	    }

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
	    {
	      move_by_pieces (target, xinner, INTVAL (size), align);
	      goto ret;
	    }
	  else
	    {
	      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
	      enum machine_mode mode;

	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		{
		  enum insn_code code = movstr_optab[(int) mode];
		  insn_operand_predicate_fn pred;

		  if (code != CODE_FOR_nothing
		      && ((GET_CODE (size) == CONST_INT
			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
			       <= (GET_MODE_MASK (mode) >> 1)))
			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		      && (!(pred = insn_data[(int) code].operand[0].predicate)
			  || ((*pred) (target, BLKmode)))
		      && (!(pred = insn_data[(int) code].operand[1].predicate)
			  || ((*pred) (xinner, BLKmode)))
		      && (!(pred = insn_data[(int) code].operand[3].predicate)
			  || ((*pred) (opalign, VOIDmode))))
		    {
		      rtx op2 = convert_to_mode (mode, size, 1);
		      rtx last = get_last_insn ();
		      rtx pat;

		      pred = insn_data[(int) code].operand[2].predicate;
		      if (pred != 0 && ! (*pred) (op2, mode))
			op2 = copy_to_mode_reg (mode, op2);

		      pat = GEN_FCN ((int) code) (target, xinner,
						  op2, opalign);
		      if (pat)
			{
			  emit_insn (pat);
			  goto ret;
			}
		      else
			delete_insns_since (last);
		    }
		}
	    }

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, LCT_NORMAL,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, LCT_NORMAL,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	{
	  addr = gen_push_operand ();
	  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
	}
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  target = addr;
	}

      dest = gen_rtx_MEM (mode, addr);
      if (type != 0)
	{
	  set_mem_attributes (dest, type, 1);
	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  MEM_ALIAS_SET (dest) = 0;
	}

      emit_move_insn (dest, x);

      if (current_function_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;
	  if (target == 0)
	    target = get_push_address (GET_MODE_SIZE (mode));

	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
			       Pmode, XEXP (x, 0), Pmode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
			       Pmode, GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, -1, align);  /* ??? size?  */
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || GET_CODE (x) != REG
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      unsigned int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();

	  if (GET_MODE (offset_rtx) != ptr_mode)
	    {
#ifdef POINTERS_EXTEND_UNSIGNED
	      offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	    }

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (GET_CODE (to_rtx) == MEM
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && alignment == GET_MODE_ALIGNMENT (mode1))
	    {
	      rtx temp = change_address (to_rtx, mode1,
					 plus_constant (XEXP (to_rtx, 0),
							(bitpos /
							 BITS_PER_UNIT)));
	      if (GET_CODE (XEXP (temp, 0)) == REG)
		to_rtx = temp;
	      else
		to_rtx = change_address (to_rtx, mode1,
					 force_reg (GET_MODE (XEXP (temp, 0)),
						    XEXP (temp, 0)));
	      bitpos = 0;
	    }

	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
						 force_reg (ptr_mode,
							    offset_rtx)));
	}

      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = copy_rtx (to_rtx);

	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && TREE_READONLY (TREE_OPERAND (to, 1)))
	{
	  if (offset == 0)
	    to_rtx = copy_rtx (to_rtx);

	  RTX_UNCHANGING_P (to_rtx) = 1;
	}

      /* Check the access.  */
      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
	{
	  rtx to_addr;
	  int size;
	  int best_mode_size;
	  enum machine_mode best_mode;

	  best_mode = get_best_mode (bitsize, bitpos,
				     TYPE_ALIGN (TREE_TYPE (tem)),
				     mode1, volatilep);
	  if (best_mode == VOIDmode)
	    best_mode = QImode;

	  best_mode_size = GET_MODE_BITSIZE (best_mode);
	  to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
	  size *= GET_MODE_SIZE (best_mode);

	  /* Check the access right of the pointer.  */
	  in_check_memory_usage = 1;
	  emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
			     VOIDmode, 3, to_addr, Pmode,
			     GEN_INT (size), TYPE_MODE (sizetype),
			     GEN_INT (MEMORY_USE_WO),
			     TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}

      /* If this is a varying-length object, we must get the address of
	 the source and do an explicit block move.  */
      if (bitsize < 0)
	{
	  unsigned int from_align;
	  rtx from_rtx = expand_expr_unaligned (from, &from_align);
	  rtx inner_to_rtx
	    = change_address (to_rtx, VOIDmode,
			      plus_constant (XEXP (to_rtx, 0),
					     bitpos / BITS_PER_UNIT));

	  emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
			   MIN (alignment, from_align));
	  free_temp_slots ();
	  pop_temp_slots ();
	  return to_rtx;
	}
      else
	{
	  result = store_field (to_rtx, bitsize, bitpos, mode1, from,
				(want_value
				 /* Spurious cast for HPUX compiler.  */
				 ? ((enum machine_mode)
				    TYPE_MODE (TREE_TYPE (to)))
				 : VOIDmode),
				unsignedp,
				alignment,
				int_size_in_bytes (TREE_TYPE (tem)),
				get_alias_set (to));

	  preserve_temp_slots (result);
	  free_temp_slots ();
	  pop_temp_slots ();

	  /* If the value is meaningful, convert RESULT to the proper mode.
	     Otherwise, return nothing.  */
	  return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					      TYPE_MODE (TREE_TYPE (from)),
					      result,
					      TREE_UNSIGNED (TREE_TYPE (to)))
		  : NULL_RTX);
	}
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
			 TYPE_ALIGN (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)));
      else
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
	      || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
	    value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    {
      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
      if (GET_CODE (to_rtx) == MEM)
	MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
    }

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
			 TYPE_ALIGN (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
			      EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			   XEXP (from_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
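
/* A sketch, for illustration only: for a plain statement assignment where
   the value of the assignment itself is not used, a front end typically
   calls

	expand_assignment (to_tree, from_tree, 0, 0);

   passing WANT_VALUE as 0 so that no rtx for the stored value has to be
   produced; TO_TREE and FROM_TREE stand for whatever trees the parser
   built.  */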
3731 /* Generate code for computing expression EXP,
3732 and storing the value into TARGET.
3733 TARGET may contain a QUEUED rtx.
3735 If WANT_VALUE is nonzero, return a copy of the value
3736 not in TARGET, so that we can be sure to use the proper
3737 value in a containing expression even if TARGET has something
3738 else stored in it. If possible, we copy the value through a pseudo
3739 and return that pseudo. Or, if the value is constant, we try to
3740 return the constant. In some cases, we return a pseudo
3741 copied *from* TARGET.
3743 If the mode is BLKmode then we may return TARGET itself.
3744 It turns out that in BLKmode it doesn't cause a problem.
3745 because C has no operators that could combine two different
3746 assignments into the same BLKmode object with different values
3747 with no sequence point. Will other languages need this to
3750 If WANT_VALUE is 0, we return NULL, to make sure
3751 to catch quickly any cases where the caller uses the value
3752 and fails to set WANT_VALUE. */
3755 store_expr (exp
, target
, want_value
)
3757 register rtx target
;
3761 int dont_return_target
= 0;
3763 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
3765 /* Perform first part of compound expression, then assign from second
3767 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
3769 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3771 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3773 /* For conditional expression, get safe form of the target. Then
3774 test the condition, doing the appropriate assignment on either
3775 side. This avoids the creation of unnecessary temporaries.
3776 For non-BLKmode, it is more efficient not to do this. */
3778 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3781 target
= protect_from_queue (target
, 1);
3783 do_pending_stack_adjust ();
3785 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3786 start_cleanup_deferral ();
3787 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3788 end_cleanup_deferral ();
3790 emit_jump_insn (gen_jump (lab2
));
3793 start_cleanup_deferral ();
3794 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3795 end_cleanup_deferral ();
3800 return want_value
? target
: NULL_RTX
;
3802 else if (queued_subexp_p (target
))
3803 /* If target contains a postincrement, let's not risk
3804 using it as the place to generate the rhs. */
3806 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3808 /* Expand EXP into a new pseudo. */
3809 temp
= gen_reg_rtx (GET_MODE (target
));
3810 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3813 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3815 /* If target is volatile, ANSI requires accessing the value
3816 *from* the target, if it is accessed. So make that happen.
3817 In no case return the target itself. */
3818 if (! MEM_VOLATILE_P (target
) && want_value
)
3819 dont_return_target
= 1;
3821 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3822 && GET_MODE (target
) != BLKmode
)
3823 /* If target is in memory and caller wants value in a register instead,
3824 arrange that. Pass TARGET as target for expand_expr so that,
3825 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3826 We know expand_expr will not use the target in that case.
3827 Don't do this if TARGET is volatile because we are supposed
3828 to write it and then read it. */
3830 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3831 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3832 temp
= copy_to_reg (temp
);
3833 dont_return_target
= 1;
3835 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3836 /* If this is an scalar in a register that is stored in a wider mode
3837 than the declared mode, compute the result into its declared mode
3838 and then convert to the wider mode. Our value is the computed
3841 /* If we don't want a value, we can do the conversion inside EXP,
3842 which will often result in some optimizations. Do the conversion
3843 in two steps: first change the signedness, if needed, then
3844 the extend. But don't do this if the type of EXP is a subtype
3845 of something else since then the conversion might involve
3846 more than just converting modes. */
3847 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
3848 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
3850 if (TREE_UNSIGNED (TREE_TYPE (exp
))
3851 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3854 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
3858 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
3859 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3863 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3865 /* If TEMP is a volatile MEM and we want a result value, make
3866 the access now so it gets done only once. Likewise if
3867 it contains TARGET. */
3868 if (GET_CODE (temp
) == MEM
&& want_value
3869 && (MEM_VOLATILE_P (temp
)
3870 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
3871 temp
= copy_to_reg (temp
);
3873 /* If TEMP is a VOIDmode constant, use convert_modes to make
3874 sure that we properly convert it. */
3875 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3876 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3877 TYPE_MODE (TREE_TYPE (exp
)), temp
,
3878 SUBREG_PROMOTED_UNSIGNED_P (target
));
3880 convert_move (SUBREG_REG (target
), temp
,
3881 SUBREG_PROMOTED_UNSIGNED_P (target
));
3883 /* If we promoted a constant, change the mode back down to match
3884 target. Otherwise, the caller might get confused by a result whose
3885 mode is larger than expected. */
3887 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
)
3888 && GET_MODE (temp
) != VOIDmode
)
3890 temp
= gen_rtx_SUBREG (GET_MODE (target
), temp
, 0);
3891 SUBREG_PROMOTED_VAR_P (temp
) = 1;
3892 SUBREG_PROMOTED_UNSIGNED_P (temp
)
3893 = SUBREG_PROMOTED_UNSIGNED_P (target
);
3896 return want_value
? temp
: NULL_RTX
;
3900 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3901 /* Return TARGET if it's a specified hardware register.
3902 If TARGET is a volatile mem ref, either return TARGET
3903 or return a reg copied *from* TARGET; ANSI requires this.
3905 Otherwise, if TEMP is not TARGET, return TEMP
3906 if it is constant (for efficiency),
3907 or if we really want the correct value. */
3908 if (!(target
&& GET_CODE (target
) == REG
3909 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3910 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
3911 && ! rtx_equal_p (temp
, target
)
3912 && (CONSTANT_P (temp
) || want_value
))
3913 dont_return_target
= 1;
3916 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3917 the same as that of TARGET, adjust the constant. This is needed, for
3918 example, in case it is a CONST_DOUBLE and we want only a word-sized
3920 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3921 && TREE_CODE (exp
) != ERROR_MARK
3922 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3923 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3924 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
3926 if (current_function_check_memory_usage
3927 && GET_CODE (target
) == MEM
3928 && AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
3930 in_check_memory_usage
= 1;
3931 if (GET_CODE (temp
) == MEM
)
3932 emit_library_call (chkr_copy_bitmap_libfunc
, LCT_CONST_MAKE_BLOCK
,
3933 VOIDmode
, 3, XEXP (target
, 0), Pmode
,
3934 XEXP (temp
, 0), Pmode
,
3935 expr_size (exp
), TYPE_MODE (sizetype
));
3937 emit_library_call (chkr_check_addr_libfunc
, LCT_CONST_MAKE_BLOCK
,
3938 VOIDmode
, 3, XEXP (target
, 0), Pmode
,
3939 expr_size (exp
), TYPE_MODE (sizetype
),
3940 GEN_INT (MEMORY_USE_WO
),
3941 TYPE_MODE (integer_type_node
));
3942 in_check_memory_usage
= 0;
3945 /* If value was not generated in the target, store it there.
3946 Convert the value to TARGET's type first if nec. */
3947 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3948 one or both of them are volatile memory refs, we have to distinguish
3950 - expand_expr has used TARGET. In this case, we must not generate
3951 another copy. This can be detected by TARGET being equal according
3953 - expand_expr has not used TARGET - that means that the source just
3954 happens to have the same RTX form. Since temp will have been created
3955 by expand_expr, it will compare unequal according to == .
3956 We must generate a copy in this case, to reach the correct number
3957 of volatile memory references. */
3959 if ((! rtx_equal_p (temp
, target
)
3960 || (temp
!= target
&& (side_effects_p (temp
)
3961 || side_effects_p (target
))))
3962 && TREE_CODE (exp
) != ERROR_MARK
)
3964 target
= protect_from_queue (target
, 1);
3965 if (GET_MODE (temp
) != GET_MODE (target
)
3966 && GET_MODE (temp
) != VOIDmode
)
3968 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
3969 if (dont_return_target
)
3971 /* In this case, we will return TEMP,
3972 so make sure it has the proper mode.
3973 But don't forget to store the value into TARGET. */
3974 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
3975 emit_move_insn (target
, temp
);
3978 convert_move (target
, temp
, unsignedp
);
3981 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
3983 /* Handle copying a string constant into an array.
3984 The string constant may be shorter than the array.
3985 So copy just the string's actual length, and clear the rest. */
3989 /* Get the size of the data type of the string,
3990 which is actually the size of the target. */
3991 size
= expr_size (exp
);
3992 if (GET_CODE (size
) == CONST_INT
3993 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
3994 emit_block_move (target
, temp
, size
, TYPE_ALIGN (TREE_TYPE (exp
)));
3997 /* Compute the size of the data to copy from the string. */
3999 = size_binop (MIN_EXPR
,
4000 make_tree (sizetype
, size
),
4001 size_int (TREE_STRING_LENGTH (exp
)));
4002 unsigned int align
= TYPE_ALIGN (TREE_TYPE (exp
));
4003 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
4007 /* Copy that much. */
4008 emit_block_move (target
, temp
, copy_size_rtx
,
4009 TYPE_ALIGN (TREE_TYPE (exp
)));
4011 /* Figure out how much is left in TARGET that we have to clear.
4012 Do all calculations in ptr_mode. */
4014 addr
= XEXP (target
, 0);
4015 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
4017 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4019 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
4020 size
= plus_constant (size
, -TREE_STRING_LENGTH (exp
));
4021 align
= MIN (align
, (BITS_PER_UNIT
4022 * (INTVAL (copy_size_rtx
)
4023 & - INTVAL (copy_size_rtx
))));
4027 addr
= force_reg (ptr_mode
, addr
);
4028 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
4029 copy_size_rtx
, NULL_RTX
, 0,
4032 size
= expand_binop (ptr_mode
, sub_optab
, size
,
4033 copy_size_rtx
, NULL_RTX
, 0,
4036 align
= BITS_PER_UNIT
;
4037 label
= gen_label_rtx ();
4038 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4039 GET_MODE (size
), 0, 0, label
);
4041 align
= MIN (align
, expr_align (copy_size
));
4043 if (size
!= const0_rtx
)
4045 rtx dest
= gen_rtx_MEM (BLKmode
, addr
);
4047 MEM_COPY_ATTRIBUTES (dest
, target
);
4049 /* Be sure we can write on ADDR. */
4050 in_check_memory_usage
= 1;
4051 if (current_function_check_memory_usage
)
4052 emit_library_call (chkr_check_addr_libfunc
,
4053 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
4055 size
, TYPE_MODE (sizetype
),
4056 GEN_INT (MEMORY_USE_WO
),
4057 TYPE_MODE (integer_type_node
));
4058 in_check_memory_usage
= 0;
4059 clear_storage (dest
, size
, align
);
  /* Handle calls that return values in multiple non-contiguous locations.
     The Irix 6 ABI has examples of this.  */
  else if (GET_CODE (target) == PARALLEL)
    emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
		     TYPE_ALIGN (TREE_TYPE (exp)));
  else if (GET_MODE (temp) == BLKmode)
    emit_block_move (target, temp, expr_size (exp),
		     TYPE_ALIGN (TREE_TYPE (exp)));
  else
    emit_move_insn (target, temp);

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);
  else
    return target;
}
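/* A minimal standalone sketch, in plain C, of what the STRING_CST path
   above accomplishes at run time: copy no more than the string's length
   into the destination array and then clear whatever space remains.  The
   function and parameter names here are illustrative only and are not
   part of this file.  */
static void
init_array_from_string_sketch (char *dest, unsigned long dest_size,
			       const char *str, unsigned long str_len)
{
  unsigned long copy = str_len < dest_size ? str_len : dest_size;
  unsigned long i;

  /* Copy only the smaller of the string length and the target size,
     mirroring the MIN_EXPR / emit_block_move sequence above.  */
  for (i = 0; i < copy; i++)
    dest[i] = str[i];

  /* Clear the tail of the array, mirroring the clear_storage call.  */
  for (i = copy; i < dest_size; i++)
    dest[i] = 0;
}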
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}

/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);

      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	/* If there are no ranges of true bits, it is all zero.  */
	return elt == NULL_TREE;

      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
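/* Illustrative sketch (not part of this file): the same 3/4 test applied
   to a plain array of ints.  When this predicate holds, store_constructor
   prefers to clear the whole object with clear_storage first and then
   store only the nonzero elements.  */
static int
mostly_zeros_example (const int *vals, int n)
{
  int zeros = 0, i;

  for (i = 0; i < n; i++)
    if (vals[i] == 0)
      zeros++;

  /* "Mostly zeros" means at least three quarters of the elements.  */
  return 4 * zeros >= 3 * n;
}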
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   ALIGN and CLEARED are as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, align, cleared, alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     unsigned int align;
     int cleared;
     int alias_set;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target
	  = change_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode,
			    plus_constant (XEXP (target, 0),
					   bitpos / BITS_PER_UNIT));

      MEM_ALIAS_SET (target) = alias_set;

      store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
		 int_size_in_bytes (type), alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   ALIGN is the maximum known alignment for TARGET.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (exp, target, align, cleared, size)
     tree exp;
     rtx target;
     unsigned int align;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  /* We know our target cannot conflict, since safe_from_p has been called.  */

  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, align, cleared, size);
      emit_move_insn (target, temp);
      return;
    }
  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if ((TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! cleared)
	{
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	  /* If the constructor is empty, clear the union.  */
	  if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
	    clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
	}

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  Don't do this if TARGET is a
	 register whose mode size isn't equal to SIZE, since clear_storage
	 can't handle this case.  */
      else if (size > 0
	       && ((list_length (CONSTRUCTOR_ELTS (exp))
		    != fields_length (type))
		   || mostly_zeros_p (exp))
	       && (GET_CODE (target) != REG
		   || GET_MODE_SIZE (GET_MODE (target)) == size))
	{
	  if (! cleared)
	    clear_storage (target, GEN_INT (size), align);

	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */
4306 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4308 register tree field
= TREE_PURPOSE (elt
);
4309 #ifdef WORD_REGISTER_OPERATIONS
4310 tree value
= TREE_VALUE (elt
);
4312 register enum machine_mode mode
;
4313 HOST_WIDE_INT bitsize
;
4314 HOST_WIDE_INT bitpos
= 0;
4317 rtx to_rtx
= target
;
4319 /* Just ignore missing fields.
4320 We cleared the whole structure, above,
4321 if any fields are missing. */
4325 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
4328 if (host_integerp (DECL_SIZE (field
), 1))
4329 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4333 unsignedp
= TREE_UNSIGNED (field
);
4334 mode
= DECL_MODE (field
);
4335 if (DECL_BIT_FIELD (field
))
4338 offset
= DECL_FIELD_OFFSET (field
);
4339 if (host_integerp (offset
, 0)
4340 && host_integerp (bit_position (field
), 0))
4342 bitpos
= int_bit_position (field
);
4346 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4352 if (contains_placeholder_p (offset
))
4353 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4354 offset
, make_tree (TREE_TYPE (exp
), target
));
4356 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4357 if (GET_CODE (to_rtx
) != MEM
)
4360 if (GET_MODE (offset_rtx
) != ptr_mode
)
4362 #ifdef POINTERS_EXTEND_UNSIGNED
4363 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
4365 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4370 = change_address (to_rtx
, VOIDmode
,
4371 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
4372 force_reg (ptr_mode
,
4374 align
= DECL_OFFSET_ALIGN (field
);
4377 if (TREE_READONLY (field
))
4379 if (GET_CODE (to_rtx
) == MEM
)
4380 to_rtx
= copy_rtx (to_rtx
);
4382 RTX_UNCHANGING_P (to_rtx
) = 1;
4385 #ifdef WORD_REGISTER_OPERATIONS
4386 /* If this initializes a field that is smaller than a word, at the
4387 start of a word, try to widen it to a full word.
4388 This special case allows us to output C++ member function
4389 initializations in a form that the optimizers can understand. */
4390 if (GET_CODE (target
) == REG
4391 && bitsize
< BITS_PER_WORD
4392 && bitpos
% BITS_PER_WORD
== 0
4393 && GET_MODE_CLASS (mode
) == MODE_INT
4394 && TREE_CODE (value
) == INTEGER_CST
4396 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4398 tree type
= TREE_TYPE (value
);
4399 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4401 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4402 value
= convert (type
, value
);
4404 if (BYTES_BIG_ENDIAN
)
4406 = fold (build (LSHIFT_EXPR
, type
, value
,
4407 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4408 bitsize
= BITS_PER_WORD
;
4412 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4413 TREE_VALUE (elt
), type
, align
, cleared
,
4414 DECL_NONADDRESSABLE_P (field
)
4415 ? MEM_ALIAS_SET (to_rtx
)
4416 : get_alias_set (TREE_TYPE (field
)));
4419 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4424 tree domain
= TYPE_DOMAIN (type
);
4425 tree elttype
= TREE_TYPE (type
);
4426 int const_bounds_p
= (host_integerp (TYPE_MIN_VALUE (domain
), 0)
4427 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4428 HOST_WIDE_INT minelt
;
4429 HOST_WIDE_INT maxelt
;
4431 /* If we have constant bounds for the range of the type, get them. */
4434 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4435 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
	  /* If the constructor has fewer elements than the array,
	     clear the whole array first.  Similarly if this is a
	     static constructor of a non-BLKmode object.  */
	  if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	    need_to_clear = 1;
	  else
	    {
4445 HOST_WIDE_INT count
= 0, zero_count
= 0;
4446 need_to_clear
= ! const_bounds_p
;
4448 /* This loop is a more accurate version of the loop in
4449 mostly_zeros_p (it handles RANGE_EXPR in an index).
4450 It is also needed to check for missing elements. */
4451 for (elt
= CONSTRUCTOR_ELTS (exp
);
4452 elt
!= NULL_TREE
&& ! need_to_clear
;
4453 elt
= TREE_CHAIN (elt
))
4455 tree index
= TREE_PURPOSE (elt
);
4456 HOST_WIDE_INT this_node_count
;
4458 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4460 tree lo_index
= TREE_OPERAND (index
, 0);
4461 tree hi_index
= TREE_OPERAND (index
, 1);
4463 if (! host_integerp (lo_index
, 1)
4464 || ! host_integerp (hi_index
, 1))
4470 this_node_count
= (tree_low_cst (hi_index
, 1)
4471 - tree_low_cst (lo_index
, 1) + 1);
4474 this_node_count
= 1;
4476 count
+= this_node_count
;
4477 if (mostly_zeros_p (TREE_VALUE (elt
)))
4478 zero_count
+= this_node_count
;
4481 /* Clear the entire array first if there are any missing elements,
4482 or if the incidence of zero elements is >= 75%. */
4484 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4488 if (need_to_clear
&& size
> 0)
4491 clear_storage (target
, GEN_INT (size
), align
);
4495 /* Inform later passes that the old value is dead. */
4496 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4498 /* Store each element of the constructor into
4499 the corresponding element of TARGET, determined
4500 by counting the elements. */
4501 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4503 elt
= TREE_CHAIN (elt
), i
++)
4505 register enum machine_mode mode
;
4506 HOST_WIDE_INT bitsize
;
4507 HOST_WIDE_INT bitpos
;
4509 tree value
= TREE_VALUE (elt
);
4510 unsigned int align
= TYPE_ALIGN (TREE_TYPE (value
));
4511 tree index
= TREE_PURPOSE (elt
);
4512 rtx xtarget
= target
;
4514 if (cleared
&& is_zeros_p (value
))
4517 unsignedp
= TREE_UNSIGNED (elttype
);
4518 mode
= TYPE_MODE (elttype
);
4519 if (mode
== BLKmode
)
4520 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4521 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4524 bitsize
= GET_MODE_BITSIZE (mode
);
4526 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4528 tree lo_index
= TREE_OPERAND (index
, 0);
4529 tree hi_index
= TREE_OPERAND (index
, 1);
4530 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
4531 struct nesting
*loop
;
4532 HOST_WIDE_INT lo
, hi
, count
;
4535 /* If the range is constant and "small", unroll the loop. */
4537 && host_integerp (lo_index
, 0)
4538 && host_integerp (hi_index
, 0)
4539 && (lo
= tree_low_cst (lo_index
, 0),
4540 hi
= tree_low_cst (hi_index
, 0),
4541 count
= hi
- lo
+ 1,
4542 (GET_CODE (target
) != MEM
4544 || (host_integerp (TYPE_SIZE (elttype
), 1)
4545 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4548 lo
-= minelt
; hi
-= minelt
;
4549 for (; lo
<= hi
; lo
++)
4551 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4552 store_constructor_field
4553 (target
, bitsize
, bitpos
, mode
, value
, type
, align
,
4555 TYPE_NONALIASED_COMPONENT (type
)
4556 ? MEM_ALIAS_SET (target
) : get_alias_set (elttype
));
4561 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4562 loop_top
= gen_label_rtx ();
4563 loop_end
= gen_label_rtx ();
4565 unsignedp
= TREE_UNSIGNED (domain
);
4567 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4569 DECL_RTL (index
) = index_r
4570 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4573 if (TREE_CODE (value
) == SAVE_EXPR
4574 && SAVE_EXPR_RTL (value
) == 0)
4576 /* Make sure value gets expanded once before the
4578 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4581 store_expr (lo_index
, index_r
, 0);
4582 loop
= expand_start_loop (0);
4584 /* Assign value to element index. */
4586 = convert (ssizetype
,
4587 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4588 index
, TYPE_MIN_VALUE (domain
))));
4589 position
= size_binop (MULT_EXPR
, position
,
4591 TYPE_SIZE_UNIT (elttype
)));
4593 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4594 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4595 xtarget
= change_address (target
, mode
, addr
);
4596 if (TREE_CODE (value
) == CONSTRUCTOR
)
4597 store_constructor (value
, xtarget
, align
, cleared
,
4598 bitsize
/ BITS_PER_UNIT
);
4600 store_expr (value
, xtarget
, 0);
4602 expand_exit_loop_if_false (loop
,
4603 build (LT_EXPR
, integer_type_node
,
4606 expand_increment (build (PREINCREMENT_EXPR
,
4608 index
, integer_one_node
), 0, 0);
4610 emit_label (loop_end
);
4613 else if ((index
!= 0 && ! host_integerp (index
, 0))
4614 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4620 index
= ssize_int (1);
4623 index
= convert (ssizetype
,
4624 fold (build (MINUS_EXPR
, index
,
4625 TYPE_MIN_VALUE (domain
))));
4627 position
= size_binop (MULT_EXPR
, index
,
4629 TYPE_SIZE_UNIT (elttype
)));
4630 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4631 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4632 xtarget
= change_address (target
, mode
, addr
);
4633 store_expr (value
, xtarget
, 0);
4638 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4639 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4641 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4643 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4644 type
, align
, cleared
,
4645 TYPE_NONALIASED_COMPONENT (type
)
4646 ? MEM_ALIAS_SET (target
) :
4647 get_alias_set (elttype
));
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and then
	 set the bits we want.  */
4671 /* Check for all zeros. */
4672 if (elt
== NULL_TREE
&& size
> 0)
4675 clear_storage (target
, GEN_INT (size
), TYPE_ALIGN (type
));
4679 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4680 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4681 bitlength
= size_binop (PLUS_EXPR
,
4682 size_diffop (domain_max
, domain_min
),
4685 nbits
= tree_low_cst (bitlength
, 1);
4687 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4688 are "complicated" (more than one range), initialize (the
4689 constant parts) by copying from a constant. */
4690 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4691 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4693 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4694 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4695 char *bit_buffer
= (char *) alloca (nbits
);
4696 HOST_WIDE_INT word
= 0;
4697 unsigned int bit_pos
= 0;
4698 unsigned int ibit
= 0;
4699 unsigned int offset
= 0; /* In bytes from beginning of set. */
4701 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4704 if (bit_buffer
[ibit
])
4706 if (BYTES_BIG_ENDIAN
)
4707 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4709 word
|= 1 << bit_pos
;
4713 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4715 if (word
!= 0 || ! cleared
)
4717 rtx datum
= GEN_INT (word
);
4720 /* The assumption here is that it is safe to use
4721 XEXP if the set is multi-word, but not if
4722 it's single-word. */
4723 if (GET_CODE (target
) == MEM
)
4725 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
4726 to_rtx
= change_address (target
, mode
, to_rtx
);
4728 else if (offset
== 0)
4732 emit_move_insn (to_rtx
, datum
);
4739 offset
+= set_word_size
/ BITS_PER_UNIT
;
4744 /* Don't bother clearing storage if the set is all ones. */
4745 if (TREE_CHAIN (elt
) != NULL_TREE
4746 || (TREE_PURPOSE (elt
) == NULL_TREE
4748 : ( ! host_integerp (TREE_VALUE (elt
), 0)
4749 || ! host_integerp (TREE_PURPOSE (elt
), 0)
4750 || (tree_low_cst (TREE_VALUE (elt
), 0)
4751 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
4752 != (HOST_WIDE_INT
) nbits
))))
4753 clear_storage (target
, expr_size (exp
), TYPE_ALIGN (type
));
4755 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
4757 /* Start of range of element or NULL. */
4758 tree startbit
= TREE_PURPOSE (elt
);
4759 /* End of range of element, or element value. */
4760 tree endbit
= TREE_VALUE (elt
);
4761 #ifdef TARGET_MEM_FUNCTIONS
4762 HOST_WIDE_INT startb
, endb
;
4764 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
4766 bitlength_rtx
= expand_expr (bitlength
,
4767 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
4769 /* Handle non-range tuple element like [ expr ]. */
4770 if (startbit
== NULL_TREE
)
4772 startbit
= save_expr (endbit
);
4776 startbit
= convert (sizetype
, startbit
);
4777 endbit
= convert (sizetype
, endbit
);
4778 if (! integer_zerop (domain_min
))
4780 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
4781 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
4783 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
4784 EXPAND_CONST_ADDRESS
);
4785 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
4786 EXPAND_CONST_ADDRESS
);
4790 targetx
= assign_stack_temp (GET_MODE (target
),
4791 GET_MODE_SIZE (GET_MODE (target
)),
4793 emit_move_insn (targetx
, target
);
4796 else if (GET_CODE (target
) == MEM
)
4801 #ifdef TARGET_MEM_FUNCTIONS
4802 /* Optimization: If startbit and endbit are
4803 constants divisible by BITS_PER_UNIT,
4804 call memset instead. */
4805 if (TREE_CODE (startbit
) == INTEGER_CST
4806 && TREE_CODE (endbit
) == INTEGER_CST
4807 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
4808 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
4810 emit_library_call (memset_libfunc
, LCT_NORMAL
,
4812 plus_constant (XEXP (targetx
, 0),
4813 startb
/ BITS_PER_UNIT
),
4815 constm1_rtx
, TYPE_MODE (integer_type_node
),
4816 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
4817 TYPE_MODE (sizetype
));
4821 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
4822 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
4823 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
4824 startbit_rtx
, TYPE_MODE (sizetype
),
4825 endbit_rtx
, TYPE_MODE (sizetype
));
4828 emit_move_insn (target
, targetx
);
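/* Illustrative sketch (not part of this file) of the SET_TYPE strategy
   described above, applied to a plain word-array bit set: write the
   compile-time-constant bits word by word, then OR in any range whose
   bounds are only known at run time.  All names here are invented for
   the example.  */
static void
build_bit_set_sketch (unsigned long *words, unsigned long nbits,
		      const unsigned long *constant_words,
		      unsigned long lo, unsigned long hi)
{
  unsigned long bits_per_word = 8 * sizeof (unsigned long);
  unsigned long nwords = (nbits + bits_per_word - 1) / bits_per_word;
  unsigned long i;

  /* Constant part: analogous to emitting one move per set word.
     CONSTANT_WORDS is assumed to hold at least NWORDS entries.  */
  for (i = 0; i < nwords; i++)
    words[i] = constant_words[i];

  /* Non-constant range [lo, hi]: analogous to the __setbits/memset call.  */
  for (i = lo; i <= hi && i < nbits; i++)
    words[i / bits_per_word] |= 1UL << (i % bits_per_word);
}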
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size, alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     unsigned int align;
     HOST_WIDE_INT total_size;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */
4889 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
4891 rtx object
= assign_stack_temp (GET_MODE (target
),
4892 GET_MODE_SIZE (GET_MODE (target
)), 0);
4893 rtx blk_object
= copy_rtx (object
);
4895 MEM_SET_IN_STRUCT_P (object
, 1);
4896 MEM_SET_IN_STRUCT_P (blk_object
, 1);
4897 PUT_MODE (blk_object
, BLKmode
);
4899 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
4900 emit_move_insn (object
, target
);
4902 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4903 align
, total_size
, alias_set
);
4905 /* Even though we aren't returning target, we need to
4906 give it the updated value. */
4907 emit_move_insn (target
, object
);
4912 if (GET_CODE (target
) == CONCAT
)
4914 /* We're storing into a struct containing a single __complex. */
4918 return store_expr (exp
, target
, 0);
4921 /* If the structure is in a register or if the component
4922 is a bit field, we cannot use addressing to access it.
4923 Use bit-field techniques or SUBREG to store in it. */
4925 if (mode
== VOIDmode
4926 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
4927 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
4928 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
4929 || GET_CODE (target
) == REG
4930 || GET_CODE (target
) == SUBREG
4931 /* If the field isn't aligned enough to store as an ordinary memref,
4932 store it as a bit field. */
4933 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, align
)
4934 && (align
< GET_MODE_ALIGNMENT (mode
)
4935 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
4936 || (mode
== BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, align
)
4937 && (TYPE_ALIGN (TREE_TYPE (exp
)) > align
4938 || bitpos
% TYPE_ALIGN (TREE_TYPE (exp
)) != 0))
4939 /* If the RHS and field are a constant size and the size of the
4940 RHS isn't the same size as the bitfield, we must use bitfield
4943 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
4944 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
4946 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4948 /* If BITSIZE is narrower than the size of the type of EXP
4949 we will be narrowing TEMP. Normally, what's wanted are the
4950 low-order bits. However, if EXP's type is a record and this is
4951 big-endian machine, we want the upper BITSIZE bits. */
4952 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
4953 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
4954 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
4955 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
4956 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
4960 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4962 if (mode
!= VOIDmode
&& mode
!= BLKmode
4963 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
4964 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
4966 /* If the modes of TARGET and TEMP are both BLKmode, both
4967 must be in memory and BITPOS must be aligned on a byte
4968 boundary. If so, we simply do a block copy. */
4969 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
4971 unsigned int exp_align
= expr_align (exp
);
4973 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
4974 || bitpos
% BITS_PER_UNIT
!= 0)
4977 target
= change_address (target
, VOIDmode
,
4978 plus_constant (XEXP (target
, 0),
4979 bitpos
/ BITS_PER_UNIT
));
4981 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4982 align
= MIN (exp_align
, align
);
4984 /* Find an alignment that is consistent with the bit position. */
4985 while ((bitpos
% align
) != 0)
4988 emit_block_move (target
, temp
,
4989 bitsize
== -1 ? expr_size (exp
)
4990 : GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
4994 return value_mode
== VOIDmode
? const0_rtx
: target
;
4997 /* Store the value in the bitfield. */
4998 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
4999 if (value_mode
!= VOIDmode
)
5001 /* The caller wants an rtx for the value. */
5002 /* If possible, avoid refetching from the bitfield itself. */
5004 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5007 enum machine_mode tmode
;
5010 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
5011 tmode
= GET_MODE (temp
);
5012 if (tmode
== VOIDmode
)
5014 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5015 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5016 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5018 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5019 NULL_RTX
, value_mode
, 0, align
,
5026 rtx addr
= XEXP (target
, 0);
5029 /* If a value is wanted, it must be the lhs;
5030 so make the address stable for multiple use. */
5032 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5033 && ! CONSTANT_ADDRESS_P (addr
)
5034 /* A frame-pointer reference is already stable. */
5035 && ! (GET_CODE (addr
) == PLUS
5036 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5037 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5038 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5039 addr
= copy_to_reg (addr
);
5041 /* Now build a reference to just the desired component. */
5043 to_rtx
= copy_rtx (change_address (target
, mode
,
5044 plus_constant (addr
,
5046 / BITS_PER_UNIT
))));
5047 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5048 MEM_ALIAS_SET (to_rtx
) = alias_set
;
5050 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
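/* Illustrative sketch (not part of this file) of the read-modify-write
   that store_bit_field performs when a field cannot be addressed
   directly: keep the low BITSIZE bits of the value (cf. width_mask
   above) and splice them into the containing word at BITPOS.  Assumes
   a 32-bit unsigned int and bitpos + bitsize <= 32; all names are
   invented for the example.  */
static unsigned int
store_bitfield_sketch (unsigned int word, unsigned int value,
		       int bitpos, int bitsize)
{
  unsigned int mask = (bitsize < 32 ? (1u << bitsize) - 1 : ~0u);

  word &= ~(mask << bitpos);		/* clear the old field  */
  word |= (value & mask) << bitpos;	/* insert the new bits  */
  return word;
}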
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but it can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
5080 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5081 punsignedp
, pvolatilep
, palignment
)
5083 HOST_WIDE_INT
*pbitsize
;
5084 HOST_WIDE_INT
*pbitpos
;
5086 enum machine_mode
*pmode
;
5089 unsigned int *palignment
;
5092 enum machine_mode mode
= VOIDmode
;
5093 tree offset
= size_zero_node
;
5094 tree bit_offset
= bitsize_zero_node
;
5095 unsigned int alignment
= BIGGEST_ALIGNMENT
;
5098 /* First get the mode, signedness, and size. We do this from just the
5099 outermost expression. */
5100 if (TREE_CODE (exp
) == COMPONENT_REF
)
5102 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5103 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5104 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5106 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5108 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5110 size_tree
= TREE_OPERAND (exp
, 1);
5111 *punsignedp
= TREE_UNSIGNED (exp
);
5115 mode
= TYPE_MODE (TREE_TYPE (exp
));
5116 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5118 if (mode
== BLKmode
)
5119 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5121 *pbitsize
= GET_MODE_BITSIZE (mode
);
5126 if (! host_integerp (size_tree
, 1))
5127 mode
= BLKmode
, *pbitsize
= -1;
5129 *pbitsize
= tree_low_cst (size_tree
, 1);
5132 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5133 and find the ultimate containing object. */
5136 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5137 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5138 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5140 tree field
= TREE_OPERAND (exp
, 1);
5141 tree this_offset
= DECL_FIELD_OFFSET (field
);
5143 /* If this field hasn't been filled in yet, don't go
5144 past it. This should only happen when folding expressions
5145 made during type construction. */
5146 if (this_offset
== 0)
5148 else if (! TREE_CONSTANT (this_offset
)
5149 && contains_placeholder_p (this_offset
))
5150 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5152 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5153 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5154 DECL_FIELD_BIT_OFFSET (field
));
5156 if (! host_integerp (offset
, 0))
5157 alignment
= MIN (alignment
, DECL_OFFSET_ALIGN (field
));
5160 else if (TREE_CODE (exp
) == ARRAY_REF
)
5162 tree index
= TREE_OPERAND (exp
, 1);
5163 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5164 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5165 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (exp
));
5167 /* We assume all arrays have sizes that are a multiple of a byte.
5168 First subtract the lower bound, if any, in the type of the
5169 index, then convert to sizetype and multiply by the size of the
5171 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5172 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5175 /* If the index has a self-referential type, pass it to a
5176 WITH_RECORD_EXPR; if the component size is, pass our
5177 component to one. */
5178 if (! TREE_CONSTANT (index
)
5179 && contains_placeholder_p (index
))
5180 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5181 if (! TREE_CONSTANT (unit_size
)
5182 && contains_placeholder_p (unit_size
))
5183 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
,
5184 TREE_OPERAND (exp
, 0));
5186 offset
= size_binop (PLUS_EXPR
, offset
,
5187 size_binop (MULT_EXPR
,
5188 convert (sizetype
, index
),
5192 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5193 && ! ((TREE_CODE (exp
) == NOP_EXPR
5194 || TREE_CODE (exp
) == CONVERT_EXPR
)
5195 && (TYPE_MODE (TREE_TYPE (exp
))
5196 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5199 /* If any reference in the chain is volatile, the effect is volatile. */
5200 if (TREE_THIS_VOLATILE (exp
))
5203 /* If the offset is non-constant already, then we can't assume any
5204 alignment more than the alignment here. */
5205 if (! TREE_CONSTANT (offset
))
5206 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5208 exp
= TREE_OPERAND (exp
, 0);
5212 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
5213 else if (TREE_TYPE (exp
) != 0)
5214 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5216 /* If OFFSET is constant, see if we can return the whole thing as a
5217 constant bit position. Otherwise, split it up. */
5218 if (host_integerp (offset
, 0)
5219 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5221 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5222 && host_integerp (tem
, 0))
5223 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5225 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5228 *palignment
= alignment
;
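/* Illustrative sketch (not part of this file): the kind of decomposition
   get_inner_reference performs, shown on an ordinary C object.  For a
   reference like s.arr[i].b the compiler wants a constant bit offset plus
   any variable part of the offset; offsetof plays the role of the
   DECL_FIELD_OFFSET computations above.  The struct and names below are
   invented for the example.  */
#include <stddef.h>

struct inner_sketch { int a; short b; };
struct outer_sketch { char pad; struct inner_sketch arr[4]; };

static void
decompose_reference_sketch (long i, long *const_bitpos, long *var_byte_offset)
{
  /* Constant part: offset of arr plus offset of b within one element,
     scaled to bits (cf. bit_offset in get_inner_reference).  */
  *const_bitpos = 8 * (long) (offsetof (struct outer_sketch, arr)
			      + offsetof (struct inner_sketch, b));

  /* Variable part: index times element size, in bytes (cf. *POFFSET).  */
  *var_byte_offset = i * (long) sizeof (struct inner_sketch);
}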
/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;

    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;

    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;

    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
	 MEMORY_USE_DONT, because they are modifiers to a call of
	 expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;

    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
5271 force_operand (value
, target
)
5274 register optab binoptab
= 0;
5275 /* Use a temporary to force order of execution of calls to
5279 /* Use subtarget as the target for operand 0 of a binary operation. */
5280 register rtx subtarget
= get_subtarget (target
);
5282 /* Check for a PIC address load. */
5284 && (GET_CODE (value
) == PLUS
|| GET_CODE (value
) == MINUS
)
5285 && XEXP (value
, 0) == pic_offset_table_rtx
5286 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5287 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5288 || GET_CODE (XEXP (value
, 1)) == CONST
))
5291 subtarget
= gen_reg_rtx (GET_MODE (value
));
5292 emit_move_insn (subtarget
, value
);
5296 if (GET_CODE (value
) == PLUS
)
5297 binoptab
= add_optab
;
5298 else if (GET_CODE (value
) == MINUS
)
5299 binoptab
= sub_optab
;
5300 else if (GET_CODE (value
) == MULT
)
5302 op2
= XEXP (value
, 1);
5303 if (!CONSTANT_P (op2
)
5304 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5306 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5307 return expand_mult (GET_MODE (value
), tmp
,
5308 force_operand (op2
, NULL_RTX
),
5314 op2
= XEXP (value
, 1);
5315 if (!CONSTANT_P (op2
)
5316 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5318 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
5320 binoptab
= add_optab
;
5321 op2
= negate_rtx (GET_MODE (value
), op2
);
5324 /* Check for an addition with OP2 a constant integer and our first
5325 operand a PLUS of a virtual register and something else. In that
5326 case, we want to emit the sum of the virtual register and the
5327 constant first and then add the other value. This allows virtual
5328 register instantiation to simply modify the constant rather than
5329 creating another one around this addition. */
5330 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
5331 && GET_CODE (XEXP (value
, 0)) == PLUS
5332 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5333 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5334 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5336 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
5337 XEXP (XEXP (value
, 0), 0), op2
,
5338 subtarget
, 0, OPTAB_LIB_WIDEN
);
5339 return expand_binop (GET_MODE (value
), binoptab
, temp
,
5340 force_operand (XEXP (XEXP (value
, 0), 1), 0),
5341 target
, 0, OPTAB_LIB_WIDEN
);
5344 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5345 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
5346 force_operand (op2
, NULL_RTX
),
5347 target
, 0, OPTAB_LIB_WIDEN
);
5348 /* We give UNSIGNEDP = 0 to expand_binop
5349 because the only operations we are expanding here are signed ones. */
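/* Illustrative sketch (not part of this file) of what force_operand does:
   a compound value such as (PLUS (MULT x 4) 8) is lowered recursively,
   each operator becoming one explicit operation whose result lands in a
   temporary.  The tiny expression type below is invented for the example.  */
struct expr_sketch
{
  int op;			/* 0 = leaf, '+' or '*' otherwise  */
  long leaf;			/* value when op == 0  */
  struct expr_sketch *lhs, *rhs;
};

static long
force_operand_sketch (const struct expr_sketch *e)
{
  long a, b;

  if (e->op == 0)
    return e->leaf;		/* already "a register or constant"  */

  /* Force both operands first, then perform the single binary operation;
     this mirrors the recursive force_operand / expand_binop calls.  */
  a = force_operand_sketch (e->lhs);
  b = force_operand_sketch (e->rhs);
  return e->op == '+' ? a + b : a * b;
}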
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);

	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);

	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }

  return parts;
}
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else if (TREE_PURPOSE (tail))
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);

	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }

  return parts;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];
5434 /* If EXP has varying size, we MUST use a target since we currently
5435 have no way of allocating temporaries of variable size
5436 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5437 So we assume here that something at a higher level has prevented a
5438 clash. This is somewhat bogus, but the best we can do. Only
5439 do this when X is BLKmode and when we are at the top level. */
5440 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5441 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5442 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5443 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5444 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5446 && GET_MODE (x
) == BLKmode
))
5449 if (top_p
&& save_expr_size
== 0)
5453 save_expr_count
= 0;
5454 save_expr_size
= ARRAY_SIZE (save_expr_trees
);
5455 save_expr_rewritten
= &save_expr_trees
[0];
5457 rtn
= safe_from_p (x
, exp
, 1);
5459 for (i
= 0; i
< save_expr_count
; ++i
)
5461 if (TREE_CODE (save_expr_trees
[i
]) != ERROR_MARK
)
5463 TREE_SET_CODE (save_expr_trees
[i
], SAVE_EXPR
);
5471 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5472 find the underlying pseudo. */
5473 if (GET_CODE (x
) == SUBREG
)
5476 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5480 /* If X is a location in the outgoing argument area, it is always safe. */
5481 if (GET_CODE (x
) == MEM
5482 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5483 || (GET_CODE (XEXP (x
, 0)) == PLUS
5484 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
5487 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5490 exp_rtl
= DECL_RTL (exp
);
5497 if (TREE_CODE (exp
) == TREE_LIST
)
5498 return ((TREE_VALUE (exp
) == 0
5499 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5500 && (TREE_CHAIN (exp
) == 0
5501 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5502 else if (TREE_CODE (exp
) == ERROR_MARK
)
5503 return 1; /* An already-visited SAVE_EXPR? */
5508 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5512 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5513 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5517 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5518 the expression. If it is set, we conflict iff we are that rtx or
5519 both are in memory. Otherwise, we check all operands of the
5520 expression recursively. */
5522 switch (TREE_CODE (exp
))
5525 return (staticp (TREE_OPERAND (exp
, 0))
5526 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5527 || TREE_STATIC (exp
));
5530 if (GET_CODE (x
) == MEM
)
5535 exp_rtl
= CALL_EXPR_RTL (exp
);
5538 /* Assume that the call will clobber all hard registers and
5540 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5541 || GET_CODE (x
) == MEM
)
5548 /* If a sequence exists, we would have to scan every instruction
5549 in the sequence to see if it was safe. This is probably not
5551 if (RTL_EXPR_SEQUENCE (exp
))
5554 exp_rtl
= RTL_EXPR_RTL (exp
);
5557 case WITH_CLEANUP_EXPR
:
5558 exp_rtl
= RTL_EXPR_RTL (exp
);
5561 case CLEANUP_POINT_EXPR
:
5562 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5565 exp_rtl
= SAVE_EXPR_RTL (exp
);
5569 /* This SAVE_EXPR might appear many times in the top-level
5570 safe_from_p() expression, and if it has a complex
5571 subexpression, examining it multiple times could result
5572 in a combinatorial explosion. E.g. on an Alpha
5573 running at least 200MHz, a Fortran test case compiled with
5574 optimization took about 28 minutes to compile -- even though
5575 it was only a few lines long, and the complicated line causing
5576 so much time to be spent in the earlier version of safe_from_p()
5577 had only 293 or so unique nodes.
5579 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5580 where it is so we can turn it back in the top-level safe_from_p()
5583 /* For now, don't bother re-sizing the array. */
5584 if (save_expr_count
>= save_expr_size
)
5586 save_expr_rewritten
[save_expr_count
++] = exp
;
5588 nops
= TREE_CODE_LENGTH (SAVE_EXPR
);
5589 for (i
= 0; i
< nops
; i
++)
5591 tree operand
= TREE_OPERAND (exp
, i
);
5592 if (operand
== NULL_TREE
)
5594 TREE_SET_CODE (exp
, ERROR_MARK
);
5595 if (!safe_from_p (x
, operand
, 0))
5597 TREE_SET_CODE (exp
, SAVE_EXPR
);
5599 TREE_SET_CODE (exp
, ERROR_MARK
);
5603 /* The only operand we look at is operand 1. The rest aren't
5604 part of the expression. */
5605 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5607 case METHOD_CALL_EXPR
:
5608 /* This takes a rtx argument, but shouldn't appear here. */
5615 /* If we have an rtx, we do not need to scan our operands. */
5619 nops
= first_rtl_op (TREE_CODE (exp
));
5620 for (i
= 0; i
< nops
; i
++)
5621 if (TREE_OPERAND (exp
, i
) != 0
5622 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5625 /* If this is a language-specific tree code, it may require
5626 special handling. */
5627 if (TREE_CODE (exp
) >= LAST_AND_UNUSED_TREE_CODE
5629 && !(*lang_safe_from_p
) (x
, exp
))
5633 /* If we have an rtl, find any enclosed object. Then see if we conflict
5637 if (GET_CODE (exp_rtl
) == SUBREG
)
5639 exp_rtl
= SUBREG_REG (exp_rtl
);
5640 if (GET_CODE (exp_rtl
) == REG
5641 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5645 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5646 are memory and EXP is not readonly. */
5647 return ! (rtx_equal_p (x
, exp_rtl
)
5648 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5649 && ! TREE_READONLY (exp
)));
5652 /* If we reach here, it is safe. */
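/* Illustrative sketch (not part of this file) of the technique safe_from_p
   uses for SAVE_EXPRs: while walking a DAG, temporarily mark each shared
   node the first time it is examined so later references skip it, then
   restore the marks when the top-level walk finishes.  Without this, a
   heavily shared subexpression can be re-examined exponentially often.
   The node type and names below are invented for the example; the SEEN
   array plays the role of save_expr_trees above and must be large enough
   to hold every distinct node.  */
struct dag_node_sketch
{
  int visited;			/* plays the role of the ERROR_MARK rewrite  */
  int nkids;
  struct dag_node_sketch *kids[4];
};

static int
walk_once_sketch (struct dag_node_sketch *node,
		  struct dag_node_sketch **seen, int *nseen)
{
  int i, work = 1;

  if (node == 0 || node->visited)
    return 0;			/* already handled via another path  */

  node->visited = 1;		/* remember it so the mark can be undone  */
  seen[(*nseen)++] = node;

  for (i = 0; i < node->nkids; i++)
    work += walk_once_sketch (node->kids[i], seen, nseen);

  return work;
}

static int
walk_dag_sketch (struct dag_node_sketch *root, struct dag_node_sketch **seen)
{
  int nseen = 0, work, i;

  work = walk_once_sketch (root, seen, &nseen);

  /* Restore every mark, just as safe_from_p turns its temporary
     ERROR_MARKs back into SAVE_EXPRs at the top level.  */
  for (i = 0; i < nseen; i++)
    seen[i]->visited = 0;

  return work;
}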
5656 /* Subroutine of expand_expr: return nonzero iff EXP is an
5657 expression whose type is statically determinable. */
5663 if (TREE_CODE (exp
) == PARM_DECL
5664 || TREE_CODE (exp
) == VAR_DECL
5665 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
5666 || TREE_CODE (exp
) == COMPONENT_REF
5667 || TREE_CODE (exp
) == ARRAY_REF
)
5672 /* Subroutine of expand_expr: return rtx if EXP is a
5673 variable or parameter; else return 0. */
5680 switch (TREE_CODE (exp
))
5684 return DECL_RTL (exp
);
#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }
}
#endif
/* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
   has any readonly fields.  If any of the fields have types that
   contain readonly fields, return true as well.  */

static int
readonly_fields_p (type)
     tree type;
{
  tree field;

  for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL
	&& (TREE_READONLY (field)
	    || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
		&& readonly_fields_p (TREE_TYPE (field)))))
      return 1;

  return 0;
}
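/* Illustrative sketch (not part of this file) of the same recursion over a
   toy type descriptor: a record has readonly fields if any direct field is
   readonly or if a nested record field has one.  All names are invented
   for the example.  */
struct type_sketch;

struct field_sketch
{
  int is_readonly;
  struct type_sketch *record;	/* nonzero when the field is itself a record  */
};

struct type_sketch
{
  int nfields;
  struct field_sketch fields[8];
};

static int
readonly_fields_sketch (const struct type_sketch *type)
{
  int i;

  for (i = 0; i < type->nfields; i++)
    if (type->fields[i].is_readonly
	|| (type->fields[i].record != 0
	    && readonly_fields_sketch (type->fields[i].record)))
      return 1;

  return 0;
}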
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
5800 expand_expr (exp
, target
, tmode
, modifier
)
5803 enum machine_mode tmode
;
5804 enum expand_modifier modifier
;
5806 register rtx op0
, op1
, temp
;
5807 tree type
= TREE_TYPE (exp
);
5808 int unsignedp
= TREE_UNSIGNED (type
);
5809 register enum machine_mode mode
;
5810 register enum tree_code code
= TREE_CODE (exp
);
5812 rtx subtarget
, original_target
;
5815 /* Used by check-memory-usage to make modifier read only. */
5816 enum expand_modifier ro_modifier
;
5818 /* Handle ERROR_MARK before anybody tries to access its type. */
5819 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
5821 op0
= CONST0_RTX (tmode
);
5827 mode
= TYPE_MODE (type
);
5828 /* Use subtarget as the target for operand 0 of a binary operation. */
5829 subtarget
= get_subtarget (target
);
5830 original_target
= target
;
5831 ignore
= (target
== const0_rtx
5832 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
5833 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
5834 || code
== COND_EXPR
)
5835 && TREE_CODE (type
) == VOID_TYPE
));
5837 /* Make a read-only version of the modifier. */
5838 if (modifier
== EXPAND_NORMAL
|| modifier
== EXPAND_SUM
5839 || modifier
== EXPAND_CONST_ADDRESS
|| modifier
== EXPAND_INITIALIZER
)
5840 ro_modifier
= modifier
;
5842 ro_modifier
= EXPAND_NORMAL
;
5844 /* If we are going to ignore this result, we need only do something
5845 if there is a side-effect somewhere in the expression. If there
5846 is, short-circuit the most common cases here. Note that we must
5847 not call expand_expr with anything but const0_rtx in case this
5848 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5852 if (! TREE_SIDE_EFFECTS (exp
))
5855 /* Ensure we reference a volatile object even if value is ignored, but
5856 don't do this if all we are doing is taking its address. */
5857 if (TREE_THIS_VOLATILE (exp
)
5858 && TREE_CODE (exp
) != FUNCTION_DECL
5859 && mode
!= VOIDmode
&& mode
!= BLKmode
5860 && modifier
!= EXPAND_CONST_ADDRESS
)
5862 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, ro_modifier
);
5863 if (GET_CODE (temp
) == MEM
)
5864 temp
= copy_to_reg (temp
);
5868 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
5869 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
5870 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
5871 VOIDmode
, ro_modifier
);
5872 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
5873 || code
== ARRAY_REF
)
5875 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
5876 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
5879 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
5880 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
5881 /* If the second operand has no side effects, just evaluate
5883 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
5884 VOIDmode
, ro_modifier
);
5885 else if (code
== BIT_FIELD_REF
)
5887 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
5888 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
5889 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, ro_modifier
);
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */
5902 && GET_MODE (target
) != mode
5903 && TREE_CODE (exp
) != INTEGER_CST
5904 && TREE_CODE (exp
) != PARM_DECL
5905 && TREE_CODE (exp
) != ARRAY_REF
5906 && TREE_CODE (exp
) != COMPONENT_REF
5907 && TREE_CODE (exp
) != BIT_FIELD_REF
5908 && TREE_CODE (exp
) != INDIRECT_REF
5909 && TREE_CODE (exp
) != CALL_EXPR
5910 && TREE_CODE (exp
) != VAR_DECL
5911 && TREE_CODE (exp
) != RTL_EXPR
)
5913 enum machine_mode mode
= GET_MODE (target
);
5915 if (GET_MODE_CLASS (mode
) == MODE_INT
5916 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5917 fatal ("unsupported wide integer operation");
5921 && TREE_CODE (exp
) != INTEGER_CST
5922 && TREE_CODE (exp
) != PARM_DECL
5923 && TREE_CODE (exp
) != ARRAY_REF
5924 && TREE_CODE (exp
) != COMPONENT_REF
5925 && TREE_CODE (exp
) != BIT_FIELD_REF
5926 && TREE_CODE (exp
) != INDIRECT_REF
5927 && TREE_CODE (exp
) != VAR_DECL
5928 && TREE_CODE (exp
) != CALL_EXPR
5929 && TREE_CODE (exp
) != RTL_EXPR
5930 && GET_MODE_CLASS (tmode
) == MODE_INT
5931 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
5932 fatal ("unsupported wide integer operation");
5934 check_max_integer_computation_mode (exp
);
5937 /* If will do cse, generate all results into pseudo registers
5938 since 1) that allows cse to find more things
5939 and 2) otherwise cse could produce an insn the machine
5942 if (! cse_not_expected
&& mode
!= BLKmode
&& target
5943 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
5950 tree function
= decl_function_context (exp
);
5951 /* Handle using a label in a containing function. */
5952 if (function
!= current_function_decl
5953 && function
!= inline_function_decl
&& function
!= 0)
5955 struct function
*p
= find_function_data (function
);
5956 p
->expr
->x_forced_labels
5957 = gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (exp
),
5958 p
->expr
->x_forced_labels
);
5962 if (modifier
== EXPAND_INITIALIZER
)
5963 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
,
5968 temp
= gen_rtx_MEM (FUNCTION_MODE
,
5969 gen_rtx_LABEL_REF (Pmode
, label_rtx (exp
)));
5970 if (function
!= current_function_decl
5971 && function
!= inline_function_decl
&& function
!= 0)
5972 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
5977 if (DECL_RTL (exp
) == 0)
5979 error_with_decl (exp
, "prior parameter's size depends on `%s'");
5980 return CONST0_RTX (mode
);
5983 /* ... fall through ... */
5986 /* If a static var's type was incomplete when the decl was written,
5987 but the type is complete now, lay out the decl now. */
5988 if (DECL_SIZE (exp
) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5989 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
5991 layout_decl (exp
, 0);
5992 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
5995 /* Although static-storage variables start off initialized, according to
5996 ANSI C, a memcpy could overwrite them with uninitialized values. So
5997 we check them too. This also lets us check for read-only variables
5998 accessed via a non-const declaration, in case it won't be detected
5999 any other way (e.g., in an embedded system or OS kernel without
6002 Aggregates are not checked here; they're handled elsewhere. */
6003 if (cfun
&& current_function_check_memory_usage
6005 && GET_CODE (DECL_RTL (exp
)) == MEM
6006 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
6008 enum memory_use_mode memory_usage
;
6009 memory_usage
= get_memory_usage_from_modifier (modifier
);
6011 in_check_memory_usage
= 1;
6012 if (memory_usage
!= MEMORY_USE_DONT
)
6013 emit_library_call (chkr_check_addr_libfunc
,
6014 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
6015 XEXP (DECL_RTL (exp
), 0), Pmode
,
6016 GEN_INT (int_size_in_bytes (type
)),
6017 TYPE_MODE (sizetype
),
6018 GEN_INT (memory_usage
),
6019 TYPE_MODE (integer_type_node
));
6020 in_check_memory_usage
= 0;
6023 /* ... fall through ... */
6027 if (DECL_RTL (exp
) == 0)
      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
6035 assemble_external (exp
);
6036 TREE_USED (exp
) = 1;
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = change_address (addr, Pmode,
                                   fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);

          temp = change_address (DECL_RTL (exp), mode, addr);
        }
      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = change_address (DECL_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }
      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
            abort ();

          temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);
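      /* For example, on a target whose PROMOTE_MODE widens QImode and HImode
         scalars, a `short' variable may live in a full-word pseudo; the
         SUBREG returned above, with SUBREG_PROMOTED_VAR_P set, records that
         the upper bits already hold a valid extension of the value, so a
         later widening conversion can often be omitted.  */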
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
                          EXPAND_MEMORY_USE_BAD);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);
    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        const char *saved_input_filename = input_filename;
        int saved_lineno = lineno;
        input_filename = EXPR_WFL_FILENAME (exp);
        lineno = EXPR_WFL_LINENO (exp);
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_filename, lineno);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
        input_filename = saved_input_filename;
        lineno = saved_lineno;
        return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
         initialization function and move it into our context.  */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          /* The following call just exists to abort if the context is
             not of a containing function.  */
          find_function_data (context);

          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return change_address (temp, mode,
                                 fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (type, 3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
                                                save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          else
            store_expr (TREE_OPERAND (exp, 0), temp, 0);

          TREE_USED (exp) = 1;
        }
      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      {
        temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
        return temp;
      }

    case PLACEHOLDER_EXPR:
      {
        tree placeholder_expr;

        /* If there is an object on the head of the placeholder list,
           see if some object in it is of type TYPE or a pointer to it.  For
           further information, see tree.def.  */
        for (placeholder_expr = placeholder_list;
             placeholder_expr != 0;
             placeholder_expr = TREE_CHAIN (placeholder_expr))
          {
            tree need_type = TYPE_MAIN_VARIANT (type);
            tree object = 0;
            tree old_list = placeholder_list;
            tree elt;

            /* Find the outermost reference that is of the type we want.
               If none, see if any object has a type that is a pointer to
               the type we want.  */
            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
                object = elt;

            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (POINTER_TYPE_P (TREE_TYPE (elt))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                      == need_type))
                object = build1 (INDIRECT_REF, need_type, elt);

            if (object != 0)
              {
                /* Expand this object skipping the list entries before
                   it was found in case it is also a PLACEHOLDER_EXPR.
                   In that case, we want to translate it using subsequent
                   entries.  */
                placeholder_list = TREE_CHAIN (placeholder_expr);
                temp = expand_expr (object, original_target, tmode,
                                    ro_modifier);
                placeholder_list = old_list;
                return temp;
              }
          }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
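      /* A WITH_RECORD_EXPR supplies the record object that any
         PLACEHOLDER_EXPRs inside its first operand refer to; a typical use
         is a field reference within a variable-sized record type (as the
         Ada front end produces), where the size expression mentions the
         record itself through a PLACEHOLDER_EXPR.  */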
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
        expand_goto (TREE_OPERAND (exp, 0));
      else
        expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;
    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
        expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
        sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;
    case LOOP_EXPR:
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
6390 tree vars
= TREE_OPERAND (exp
, 0);
6391 int vars_need_expansion
= 0;
6393 /* Need to open a binding contour here because
6394 if there are any cleanups they must be contained here. */
6395 expand_start_bindings (2);
6397 /* Mark the corresponding BLOCK for output in its proper place. */
6398 if (TREE_OPERAND (exp
, 2) != 0
6399 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6400 insert_block (TREE_OPERAND (exp
, 2));
6402 /* If VARS have not yet been expanded, expand them now. */
6405 if (DECL_RTL (vars
) == 0)
6407 vars_need_expansion
= 1;
6410 expand_decl_init (vars
);
6411 vars
= TREE_CHAIN (vars
);
6414 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, ro_modifier
);
6416 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6422 if (RTL_EXPR_SEQUENCE (exp
))
6424 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6426 emit_insns (RTL_EXPR_SEQUENCE (exp
));
6427 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6429 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6430 free_temps_for_rtl_expr (exp
);
6431 return RTL_EXPR_RTL (exp
);
6434 /* If we don't need the result, just ensure we evaluate any
6439 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6440 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
,
6441 EXPAND_MEMORY_USE_BAD
);
6445 /* All elts simple constants => refer to a constant in memory. But
6446 if this is a non-BLKmode mode, let it store a field at a time
6447 since that should make a CONST_INT or CONST_DOUBLE when we
6448 fold. Likewise, if we have a target we can use, it is best to
6449 store directly into the target unless the type is large enough
6450 that memcpy will be used. If we are making an initializer and
6451 all operands are constant, put it in memory as well. */
6452 else if ((TREE_STATIC (exp
)
6453 && ((mode
== BLKmode
6454 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6455 || TREE_ADDRESSABLE (exp
)
6456 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6457 && (! MOVE_BY_PIECES_P
6458 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6460 && ! mostly_zeros_p (exp
))))
6461 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
6463 rtx constructor
= output_constant_def (exp
);
6465 if (modifier
!= EXPAND_CONST_ADDRESS
6466 && modifier
!= EXPAND_INITIALIZER
6467 && modifier
!= EXPAND_SUM
6468 && (! memory_address_p (GET_MODE (constructor
),
6469 XEXP (constructor
, 0))
6471 && GET_CODE (XEXP (constructor
, 0)) != REG
)))
6472 constructor
= change_address (constructor
, VOIDmode
,
6473 XEXP (constructor
, 0));
6479 /* Handle calls that pass values in multiple non-contiguous
6480 locations. The Irix 6 ABI has examples of this. */
6481 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6482 || GET_CODE (target
) == PARALLEL
)
6484 if (mode
!= BLKmode
&& ! TREE_ADDRESSABLE (exp
))
6485 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6487 target
= assign_temp (type
, 0, 1, 1);
6490 if (TREE_READONLY (exp
))
6492 if (GET_CODE (target
) == MEM
)
6493 target
= copy_rtx (target
);
6495 RTX_UNCHANGING_P (target
) = 1;
6498 store_constructor (exp
, target
, TYPE_ALIGN (TREE_TYPE (exp
)), 0,
6499 int_size_in_bytes (TREE_TYPE (exp
)));
6505 tree exp1
= TREE_OPERAND (exp
, 0);
6507 tree string
= string_constant (exp1
, &index
);
6509 /* Try to optimize reads from const strings. */
6511 && TREE_CODE (string
) == STRING_CST
6512 && TREE_CODE (index
) == INTEGER_CST
6513 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6514 && GET_MODE_CLASS (mode
) == MODE_INT
6515 && GET_MODE_SIZE (mode
) == 1
6516 && modifier
!= EXPAND_MEMORY_USE_WO
)
6518 GEN_INT (TREE_STRING_POINTER (string
)[TREE_INT_CST_LOW (index
)]);
6520 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6521 op0
= memory_address (mode
, op0
);
6523 if (cfun
&& current_function_check_memory_usage
6524 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
6526 enum memory_use_mode memory_usage
;
6527 memory_usage
= get_memory_usage_from_modifier (modifier
);
6529 if (memory_usage
!= MEMORY_USE_DONT
)
6531 in_check_memory_usage
= 1;
6532 emit_library_call (chkr_check_addr_libfunc
,
6533 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, op0
,
6534 Pmode
, GEN_INT (int_size_in_bytes (type
)),
6535 TYPE_MODE (sizetype
),
6536 GEN_INT (memory_usage
),
6537 TYPE_MODE (integer_type_node
));
6538 in_check_memory_usage
= 0;
6542 temp
= gen_rtx_MEM (mode
, op0
);
6543 set_mem_attributes (temp
, exp
, 0);
6545 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6546 here, because, in C and C++, the fact that a location is accessed
6547 through a pointer to const does not mean that the value there can
6548 never change. Languages where it can never change should
6549 also set TREE_STATIC. */
6550 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
) & TREE_STATIC (exp
);
6552 /* If we are writing to this object and its type is a record with
6553 readonly fields, we must mark it as readonly so it will
6554 conflict with readonly references to those fields. */
6555 if (modifier
== EXPAND_MEMORY_USE_WO
6556 && TREE_CODE (type
) == RECORD_TYPE
&& readonly_fields_p (type
))
6557 RTX_UNCHANGING_P (temp
) = 1;
6563 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6567 tree array
= TREE_OPERAND (exp
, 0);
6568 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6569 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6570 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6573 /* Optimize the special-case of a zero lower bound.
6575 We convert the low_bound to sizetype to avoid some problems
6576 with constant folding. (E.g. suppose the lower bound is 1,
6577 and its mode is QI. Without the conversion, (ARRAY
6578 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6579 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6581 if (! integer_zerop (low_bound
))
6582 index
= size_diffop (index
, convert (sizetype
, low_bound
));
          /* Fold an expression like: "foo"[2].
             This is not done in fold so it won't happen inside &.
             Don't fold if this is for wide characters since it's too
             difficult to do correctly and this is a very rare case.  */
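          /* For example, "foo"[2] folds to the character constant 'o' here,
             since the index is a constant inside the string bounds and the
             element is a single byte.  */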
6589 if (TREE_CODE (array
) == STRING_CST
6590 && TREE_CODE (index
) == INTEGER_CST
6591 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6592 && GET_MODE_CLASS (mode
) == MODE_INT
6593 && GET_MODE_SIZE (mode
) == 1)
6595 GEN_INT (TREE_STRING_POINTER (array
)[TREE_INT_CST_LOW (index
)]);
6597 /* If this is a constant index into a constant array,
6598 just get the value from the array. Handle both the cases when
6599 we have an explicit constructor and when our operand is a variable
6600 that was declared const. */
6602 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
6603 && TREE_CODE (index
) == INTEGER_CST
6604 && 0 > compare_tree_int (index
,
6605 list_length (CONSTRUCTOR_ELTS
6606 (TREE_OPERAND (exp
, 0)))))
6610 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6611 i
= TREE_INT_CST_LOW (index
);
6612 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6616 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6617 tmode
, ro_modifier
);
6620 else if (optimize
>= 1
6621 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6622 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6623 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
6625 if (TREE_CODE (index
) == INTEGER_CST
)
6627 tree init
= DECL_INITIAL (array
);
6629 if (TREE_CODE (init
) == CONSTRUCTOR
)
6633 for (elem
= CONSTRUCTOR_ELTS (init
);
6635 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6636 elem
= TREE_CHAIN (elem
))
6640 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6641 tmode
, ro_modifier
);
6643 else if (TREE_CODE (init
) == STRING_CST
6644 && 0 > compare_tree_int (index
,
6645 TREE_STRING_LENGTH (init
)))
6647 tree type
= TREE_TYPE (TREE_TYPE (init
));
6648 enum machine_mode mode
= TYPE_MODE (type
);
6650 if (GET_MODE_CLASS (mode
) == MODE_INT
6651 && GET_MODE_SIZE (mode
) == 1)
6653 (TREE_STRING_POINTER
6654 (init
)[TREE_INT_CST_LOW (index
)]));
6663 /* If the operand is a CONSTRUCTOR, we can just extract the
6664 appropriate field if it is present. Don't do this if we have
6665 already written the data since we want to refer to that copy
6666 and varasm.c assumes that's what we'll do. */
6667 if (code
!= ARRAY_REF
6668 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
6669 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
6673 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6674 elt
= TREE_CHAIN (elt
))
6675 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6676 /* We can normally use the value of the field in the
6677 CONSTRUCTOR. However, if this is a bitfield in
6678 an integral mode that we can fit in a HOST_WIDE_INT,
6679 we must mask only the number of bits in the bitfield,
6680 since this is done implicitly by the constructor. If
6681 the bitfield does not meet either of those conditions,
6682 we can't do this optimization. */
6683 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6684 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6686 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6687 <= HOST_BITS_PER_WIDE_INT
))))
6689 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6690 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6692 HOST_WIDE_INT bitsize
6693 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6695 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6697 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6698 op0
= expand_and (op0
, op1
, target
);
6702 enum machine_mode imode
6703 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6705 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6708 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6710 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6720 enum machine_mode mode1
;
6721 HOST_WIDE_INT bitsize
, bitpos
;
6724 unsigned int alignment
;
6725 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6726 &mode1
, &unsignedp
, &volatilep
,
6729 /* If we got back the original object, something is wrong. Perhaps
6730 we are evaluating an expression too early. In any event, don't
6731 infinitely recurse. */
6735 /* If TEM's type is a union of variable size, pass TARGET to the inner
6736 computation, since it will need a temporary and TARGET is known
6737 to have to do. This occurs in unchecked conversion in Ada. */
6739 op0
= expand_expr (tem
,
6740 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
6741 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
6743 ? target
: NULL_RTX
),
6745 (modifier
== EXPAND_INITIALIZER
6746 || modifier
== EXPAND_CONST_ADDRESS
)
6747 ? modifier
: EXPAND_NORMAL
);
6749 /* If this is a constant, put it into a register if it is a
6750 legitimate constant and OFFSET is 0 and memory if it isn't. */
6751 if (CONSTANT_P (op0
))
6753 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
6754 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
6756 op0
= force_reg (mode
, op0
);
6758 op0
= validize_mem (force_const_mem (mode
, op0
));
6763 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
6765 /* If this object is in memory, put it into a register.
6766 This case can't occur in C, but can in Ada if we have
6767 unchecked conversion of an expression from a scalar type to
6768 an array or record type. */
6769 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6770 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
6772 rtx memloc
= assign_temp (TREE_TYPE (tem
), 1, 1, 1);
6774 mark_temp_addr_taken (memloc
);
6775 emit_move_insn (memloc
, op0
);
6779 if (GET_CODE (op0
) != MEM
)
6782 if (GET_MODE (offset_rtx
) != ptr_mode
)
6784 #ifdef POINTERS_EXTEND_UNSIGNED
6785 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
6787 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
6791 /* A constant address in OP0 can have VOIDmode, we must not try
6792 to call force_reg for that case. Avoid that case. */
6793 if (GET_CODE (op0
) == MEM
6794 && GET_MODE (op0
) == BLKmode
6795 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
6797 && (bitpos
% bitsize
) == 0
6798 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
6799 && alignment
== GET_MODE_ALIGNMENT (mode1
))
6801 rtx temp
= change_address (op0
, mode1
,
6802 plus_constant (XEXP (op0
, 0),
6805 if (GET_CODE (XEXP (temp
, 0)) == REG
)
6808 op0
= change_address (op0
, mode1
,
6809 force_reg (GET_MODE (XEXP (temp
, 0)),
6814 op0
= change_address (op0
, VOIDmode
,
6815 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
6816 force_reg (ptr_mode
,
6820 /* Don't forget about volatility even if this is a bitfield. */
6821 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
6823 op0
= copy_rtx (op0
);
6824 MEM_VOLATILE_P (op0
) = 1;
6827 /* Check the access. */
6828 if (cfun
!= 0 && current_function_check_memory_usage
6829 && GET_CODE (op0
) == MEM
)
6831 enum memory_use_mode memory_usage
;
6832 memory_usage
= get_memory_usage_from_modifier (modifier
);
6834 if (memory_usage
!= MEMORY_USE_DONT
)
6839 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
6840 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
6842 /* Check the access right of the pointer. */
6843 in_check_memory_usage
= 1;
6844 if (size
> BITS_PER_UNIT
)
6845 emit_library_call (chkr_check_addr_libfunc
,
6846 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, to
,
6847 Pmode
, GEN_INT (size
/ BITS_PER_UNIT
),
6848 TYPE_MODE (sizetype
),
6849 GEN_INT (memory_usage
),
6850 TYPE_MODE (integer_type_node
));
6851 in_check_memory_usage
= 0;
6855 /* In cases where an aligned union has an unaligned object
6856 as a field, we might be extracting a BLKmode value from
6857 an integer-mode (e.g., SImode) object. Handle this case
6858 by doing the extract into an object as wide as the field
6859 (which we know to be the width of a basic mode), then
6860 storing into memory, and changing the mode to BLKmode.
6861 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6862 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6863 if (mode1
== VOIDmode
6864 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6865 || (modifier
!= EXPAND_CONST_ADDRESS
6866 && modifier
!= EXPAND_INITIALIZER
6867 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
6868 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6869 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6870 /* If the field isn't aligned enough to fetch as a memref,
6871 fetch it as a bit field. */
6872 || (mode1
!= BLKmode
6873 && SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
6874 && ((TYPE_ALIGN (TREE_TYPE (tem
))
6875 < GET_MODE_ALIGNMENT (mode
))
6876 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
6877 /* If the type and the field are a constant size and the
6878 size of the type isn't the same size as the bitfield,
6879 we must use bitfield operations. */
6881 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
6883 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
6885 || (modifier
!= EXPAND_CONST_ADDRESS
6886 && modifier
!= EXPAND_INITIALIZER
6888 && SLOW_UNALIGNED_ACCESS (mode
, alignment
)
6889 && (TYPE_ALIGN (type
) > alignment
6890 || bitpos
% TYPE_ALIGN (type
) != 0)))
6892 enum machine_mode ext_mode
= mode
;
6894 if (ext_mode
== BLKmode
6895 && ! (target
!= 0 && GET_CODE (op0
) == MEM
6896 && GET_CODE (target
) == MEM
6897 && bitpos
% BITS_PER_UNIT
== 0))
6898 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
6900 if (ext_mode
== BLKmode
)
6902 /* In this case, BITPOS must start at a byte boundary and
6903 TARGET, if specified, must be a MEM. */
6904 if (GET_CODE (op0
) != MEM
6905 || (target
!= 0 && GET_CODE (target
) != MEM
)
6906 || bitpos
% BITS_PER_UNIT
!= 0)
6909 op0
= change_address (op0
, VOIDmode
,
6910 plus_constant (XEXP (op0
, 0),
6911 bitpos
/ BITS_PER_UNIT
));
6913 target
= assign_temp (type
, 0, 1, 1);
6915 emit_block_move (target
, op0
,
6916 bitsize
== -1 ? expr_size (exp
)
6917 : GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6924 op0
= validize_mem (op0
);
6926 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
6927 mark_reg_pointer (XEXP (op0
, 0), alignment
);
6929 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
6930 unsignedp
, target
, ext_mode
, ext_mode
,
6932 int_size_in_bytes (TREE_TYPE (tem
)));
6934 /* If the result is a record type and BITSIZE is narrower than
6935 the mode of OP0, an integral mode, and this is a big endian
6936 machine, we must put the field into the high-order bits. */
6937 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
6938 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
6939 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
6940 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
6941 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
6945 if (mode
== BLKmode
)
6947 rtx
new = assign_stack_temp (ext_mode
,
6948 bitsize
/ BITS_PER_UNIT
, 0);
6950 emit_move_insn (new, op0
);
6951 op0
= copy_rtx (new);
6952 PUT_MODE (op0
, BLKmode
);
6953 MEM_SET_IN_STRUCT_P (op0
, 1);
6959 /* If the result is BLKmode, use that to access the object
6961 if (mode
== BLKmode
)
6964 /* Get a reference to just this component. */
6965 if (modifier
== EXPAND_CONST_ADDRESS
6966 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6968 rtx
new = gen_rtx_MEM (mode1
,
6969 plus_constant (XEXP (op0
, 0),
6970 (bitpos
/ BITS_PER_UNIT
)));
6972 MEM_COPY_ATTRIBUTES (new, op0
);
6976 op0
= change_address (op0
, mode1
,
6977 plus_constant (XEXP (op0
, 0),
6978 (bitpos
/ BITS_PER_UNIT
)));
6980 set_mem_attributes (op0
, exp
, 0);
6981 if (GET_CODE (XEXP (op0
, 0)) == REG
)
6982 mark_reg_pointer (XEXP (op0
, 0), alignment
);
6984 MEM_VOLATILE_P (op0
) |= volatilep
;
6985 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
6986 || modifier
== EXPAND_CONST_ADDRESS
6987 || modifier
== EXPAND_INITIALIZER
)
6989 else if (target
== 0)
6990 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6992 convert_move (target
, op0
, unsignedp
);
      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low % bits_per_word);
               the_word  = set [ (index - rlo) / bits_per_word ];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */
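        /* Worked example (following the pseudo-code above, with 8-bit
           words): for set_low = 3 and index = 13, rlo = 0, so
           the_word = set[(13 - 0) / 8] = set[1], bit_index = 13 % 8 = 5 and
           bitmask = 1 << 5 = 0x20; the result is nonzero iff bit 5 of the
           second byte of the set is set.  */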
        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;

        preexpand_calls (exp);
        /* If domain is empty, answer is no.  Likewise if index is constant
           and out of bounds.  */
        if (((TREE_CODE (set_high_bound) == INTEGER_CST
              && TREE_CODE (set_low_bound) == INTEGER_CST
              && tree_int_cst_lt (set_high_bound, set_low_bound))
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, 0, op1);

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, 0, op1);
        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));
        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx_MEM (byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }
    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
        {
          RTL_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
          expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 2) = 0;
        }
      return RTL_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (2);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return (*lang_expand_expr) (exp, original_target, tmode, modifier);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      /* If this call was expanded already by preexpand_calls,
         just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;
7166 if (TREE_CODE (type
) == UNION_TYPE
)
7168 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7170 /* If both input and output are BLKmode, this conversion
7171 isn't actually doing anything unless we need to make the
7172 alignment stricter. */
7173 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
7174 && (TYPE_ALIGN (type
) <= TYPE_ALIGN (valtype
)
7175 || TYPE_ALIGN (type
) >= BIGGEST_ALIGNMENT
))
7176 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7181 if (mode
!= BLKmode
)
7182 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7184 target
= assign_temp (type
, 0, 1, 1);
7187 if (GET_CODE (target
) == MEM
)
7188 /* Store data into beginning of memory target. */
7189 store_expr (TREE_OPERAND (exp
, 0),
7190 change_address (target
, TYPE_MODE (valtype
), 0), 0);
7192 else if (GET_CODE (target
) == REG
)
7193 /* Store this field into a union of the proper type. */
7194 store_field (target
,
7195 MIN ((int_size_in_bytes (TREE_TYPE
7196 (TREE_OPERAND (exp
, 0)))
7198 GET_MODE_BITSIZE (mode
)),
7199 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7200 VOIDmode
, 0, BITS_PER_UNIT
,
7201 int_size_in_bytes (type
), 0);
7205 /* Return the entire union. */
7209 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7211 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7214 /* If the signedness of the conversion differs and OP0 is
7215 a promoted SUBREG, clear that indication since we now
7216 have to do the proper extension. */
7217 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7218 && GET_CODE (op0
) == SUBREG
)
7219 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7224 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
7225 if (GET_MODE (op0
) == mode
)
7228 /* If OP0 is a constant, just convert it into the proper mode. */
7229 if (CONSTANT_P (op0
))
7231 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7232 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7234 if (modifier
== EXPAND_INITIALIZER
)
7235 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7239 convert_to_mode (mode
, op0
,
7240 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7242 convert_move (target
, op0
,
7243 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */
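      /* Concretely, an expression of the form (X + C) + FP, where FP is the
         RTL_EXPR for the frame (or stack or arg) pointer and C is the
         constant, is rearranged below into (FP + C) + X, so the register is
         added to the constant first.  */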
7263 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7264 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7265 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7266 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7267 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7268 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7270 tree t
= TREE_OPERAND (exp
, 1);
7272 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7273 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7276 /* If the result is to be ptr_mode and we are adding an integer to
7277 something, we might be forming a constant. So try to use
7278 plus_constant. If it produces a sum and we can't accept it,
7279 use force_operand. This allows P = &ARR[const] to generate
7280 efficient code on machines where a SYMBOL_REF is not a valid
7283 If this is an EXPAND_SUM call, always return the sum. */
7284 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7285 || mode
== ptr_mode
)
7287 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7288 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7289 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7293 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7295 /* Use immed_double_const to ensure that the constant is
7296 truncated according to the mode of OP1, then sign extended
7297 to a HOST_WIDE_INT. Using the constant directly can result
7298 in non-canonical RTL in a 64x32 cross compile. */
7300 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7302 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7303 op1
= plus_constant (op1
, INTVAL (constant_part
));
7304 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7305 op1
= force_operand (op1
, target
);
7309 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7310 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7311 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7315 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7317 if (! CONSTANT_P (op0
))
7319 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7320 VOIDmode
, modifier
);
7321 /* Don't go to both_summands if modifier
7322 says it's not right to return a PLUS. */
7323 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7327 /* Use immed_double_const to ensure that the constant is
7328 truncated according to the mode of OP1, then sign extended
7329 to a HOST_WIDE_INT. Using the constant directly can result
7330 in non-canonical RTL in a 64x32 cross compile. */
7332 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7334 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7335 op0
= plus_constant (op0
, INTVAL (constant_part
));
7336 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7337 op0
= force_operand (op0
, target
);
7342 /* No sense saving up arithmetic to be done
7343 if it's all in the wrong mode to form part of an address.
7344 And force_operand won't know whether to sign-extend or
7346 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7347 || mode
!= ptr_mode
)
7350 preexpand_calls (exp
);
7351 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7354 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, ro_modifier
);
7355 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, ro_modifier
);
7358 /* Make sure any term that's a sum with a constant comes last. */
7359 if (GET_CODE (op0
) == PLUS
7360 && CONSTANT_P (XEXP (op0
, 1)))
7366 /* If adding to a sum including a constant,
7367 associate it to put the constant outside. */
7368 if (GET_CODE (op1
) == PLUS
7369 && CONSTANT_P (XEXP (op1
, 1)))
7371 rtx constant_term
= const0_rtx
;
7373 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7376 /* Ensure that MULT comes first if there is one. */
7377 else if (GET_CODE (op0
) == MULT
)
7378 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7380 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7382 /* Let's also eliminate constants from op0 if possible. */
7383 op0
= eliminate_constant_term (op0
, &constant_term
);
7385 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7386 their sum should be a constant. Form it into OP1, since the
7387 result we want will then be OP0 + OP1. */
7389 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7394 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7397 /* Put a constant term last and put a multiplication first. */
7398 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7399 temp
= op1
, op1
= op0
, op0
= temp
;
7401 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7402 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
7405 /* For initializers, we are allowed to return a MINUS of two
7406 symbolic constants. Here we handle all cases when both operands
7408 /* Handle difference of two symbolic constants,
7409 for the sake of an initializer. */
7410 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7411 && really_constant_p (TREE_OPERAND (exp
, 0))
7412 && really_constant_p (TREE_OPERAND (exp
, 1)))
7414 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
7415 VOIDmode
, ro_modifier
);
7416 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7417 VOIDmode
, ro_modifier
);
7419 /* If the last operand is a CONST_INT, use plus_constant of
7420 the negated constant. Else make the MINUS. */
7421 if (GET_CODE (op1
) == CONST_INT
)
7422 return plus_constant (op0
, - INTVAL (op1
));
7424 return gen_rtx_MINUS (mode
, op0
, op1
);
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          tree negated = fold (build1 (NEGATE_EXPR, type,
                                       TREE_OPERAND (exp, 1)));

          if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
            /* If we can't negate the constant in TYPE, leave it alone and
               expand_binop will negate it for us.  We used to try to do it
               here in the signed version of TYPE, but that doesn't work
               on POINTER_TYPEs.  */;
          else
            {
              exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
              goto plus_expr;
            }
        }
      this_optab = sub_optab;
      goto binop;
7447 preexpand_calls (exp
);
7448 /* If first operand is constant, swap them.
7449 Thus the following special case checks need only
7450 check the second operand. */
7451 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7453 register tree t1
= TREE_OPERAND (exp
, 0);
7454 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7455 TREE_OPERAND (exp
, 1) = t1
;
7458 /* Attempt to return something suitable for generating an
7459 indexed address, for machines that support that. */
7461 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7462 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7463 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
7465 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7468 /* Apply distributive law if OP0 is x+c. */
7469 if (GET_CODE (op0
) == PLUS
7470 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
7475 (mode
, XEXP (op0
, 0),
7476 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
7477 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
7478 * INTVAL (XEXP (op0
, 1))));
7480 if (GET_CODE (op0
) != REG
)
7481 op0
= force_operand (op0
, NULL_RTX
);
7482 if (GET_CODE (op0
) != REG
)
7483 op0
= copy_to_mode_reg (mode
, op0
);
7486 gen_rtx_MULT (mode
, op0
,
7487 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
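      /* For example, on a machine that provides a widening multiply pattern
         such as mulhisi3, (int) a * (int) b with `short' a and b can then be
         emitted as one HImode-by-HImode multiply yielding the SImode product,
         instead of extending both operands and doing a full SImode
         multiply.  */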
7497 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7498 && TREE_CODE (type
) == INTEGER_TYPE
7499 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7500 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7501 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7502 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7503 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7504 /* Don't use a widening multiply if a shift will do. */
7505 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7506 > HOST_BITS_PER_WIDE_INT
)
7507 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7509 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7510 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7512 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7513 /* If both operands are extended, they must either both
7514 be zero-extended or both be sign-extended. */
7515 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7517 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7519 enum machine_mode innermode
7520 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7521 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7522 ? smul_widen_optab
: umul_widen_optab
);
7523 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7524 ? umul_widen_optab
: smul_widen_optab
);
7525 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7527 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7529 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7530 NULL_RTX
, VOIDmode
, 0);
7531 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7532 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7535 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7536 NULL_RTX
, VOIDmode
, 0);
7539 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7540 && innermode
== word_mode
)
7543 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7544 NULL_RTX
, VOIDmode
, 0);
7545 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7546 op1
= convert_modes (innermode
, mode
,
7547 expand_expr (TREE_OPERAND (exp
, 1),
7548 NULL_RTX
, VOIDmode
, 0),
7551 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7552 NULL_RTX
, VOIDmode
, 0);
7553 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7554 unsignedp
, OPTAB_LIB_WIDEN
);
7555 htem
= expand_mult_highpart_adjust (innermode
,
7556 gen_highpart (innermode
, temp
),
7558 gen_highpart (innermode
, temp
),
7560 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7565 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7566 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7567 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7569 case TRUNC_DIV_EXPR
:
7570 case FLOOR_DIV_EXPR
:
7572 case ROUND_DIV_EXPR
:
7573 case EXACT_DIV_EXPR
:
7574 preexpand_calls (exp
);
7575 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7577 /* Possible optimization: compute the dividend with EXPAND_SUM
7578 then if the divisor is constant can optimize the case
7579 where some terms of the dividend have coeffs divisible by it. */
7580 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7581 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7582 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7585 this_optab
= flodiv_optab
;
7588 case TRUNC_MOD_EXPR
:
7589 case FLOOR_MOD_EXPR
:
7591 case ROUND_MOD_EXPR
:
7592 preexpand_calls (exp
);
7593 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7595 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7596 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7597 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();                 /* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7649 target
= original_target
;
7650 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
7651 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
7652 || GET_MODE (target
) != mode
7653 || (GET_CODE (target
) == REG
7654 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
7655 target
= gen_reg_rtx (mode
);
7656 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7657 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7659 /* First try to do it with a special MIN or MAX instruction.
7660 If that does not win, use a conditional jump to select the proper
7662 this_optab
= (TREE_UNSIGNED (type
)
7663 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
7664 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
7666 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
7671 /* At this point, a MEM target is no longer useful; we will get better
7674 if (GET_CODE (target
) == MEM
)
7675 target
= gen_reg_rtx (mode
);
7678 emit_move_insn (target
, op0
);
7680 op0
= gen_label_rtx ();
7682 /* If this mode is an integer too wide to compare properly,
7683 compare word by word. Rely on cse to optimize constant cases. */
7684 if (GET_MODE_CLASS (mode
) == MODE_INT
7685 && ! can_compare_p (GE
, mode
, ccp_jump
))
7687 if (code
== MAX_EXPR
)
7688 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7689 target
, op1
, NULL_RTX
, op0
);
7691 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7692 op1
, target
, NULL_RTX
, op0
);
7696 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7697 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
7698 unsignedp
, mode
, NULL_RTX
, 0, NULL_RTX
,
7701 emit_move_insn (target
, op1
);
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */
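      /* In other words, for `a && b' written as TRUTH_AND_EXPR both `a' and
         `b' are evaluated to 0-or-1 values and combined with an AND
         instruction, whereas TRUTH_ANDIF_EXPR would branch around the
         evaluation of `b' whenever `a' is false.  */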
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case UNORDERED_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;
7778 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7779 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
7781 && GET_CODE (original_target
) == REG
7782 && (GET_MODE (original_target
)
7783 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7785 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
7788 if (temp
!= original_target
)
7789 temp
= copy_to_reg (temp
);
7791 op1
= gen_label_rtx ();
7792 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
7793 GET_MODE (temp
), unsignedp
, 0, op1
);
7794 emit_move_insn (temp
, const1_rtx
);
7799 /* If no set-flag instruction, must generate a conditional
7800 store into a temporary variable. Drop through
7801 and handle this like && and ||. */
7803 case TRUTH_ANDIF_EXPR
:
7804 case TRUTH_ORIF_EXPR
:
7806 && (target
== 0 || ! safe_from_p (target
, exp
, 1)
7807 /* Make sure we don't have a hard reg (such as function's return
7808 value) live across basic blocks, if not optimizing. */
7809 || (!optimize
&& GET_CODE (target
) == REG
7810 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
7811 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7814 emit_clr_insn (target
);
7816 op1
= gen_label_rtx ();
7817 jumpifnot (exp
, op1
);
7820 emit_0_to_1_insn (target
);
7823 return ignore
? const0_rtx
: target
;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, ro_modifier);
7843 /* If we would have a "singleton" (see below) were it not for a
7844 conversion in each arm, bring that conversion back out. */
7845 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7846 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
7847 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
7848 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
7850 tree
true = TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
7851 tree
false = TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
7853 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7854 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7855 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7856 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7857 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7858 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7859 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7860 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7861 return expand_expr (build1 (NOP_EXPR
, type
,
7862 build (COND_EXPR
, TREE_TYPE (true),
7863 TREE_OPERAND (exp
, 0),
7865 target
, tmode
, modifier
);
      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      /* If an arm of the branch requires a cleanup,
	 only that cleanup is performed.  */

	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')

	      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
	    if (GET_MODE (op0) == mode)

	    target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */
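	/* For instance, given  x ? a | 4 : a  the else-arm A is the
	   "singleton": A is copied to the output unconditionally and the
	   OR with 4 is applied only on the path where X is true.  */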
	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  temp = assign_temp (type, 0, 0, 1);
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
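	/* E.g. on a target with BRANCH_COST >= 3,  x ? a + 8 : a  can be
	   emitted branch-free as  a + ((x != 0) << 3),  using the store-flag
	   result of X shifted left by log2 (8) = 3.  */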
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')

	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton, 1)
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
				   (safe_from_p (temp, singleton, 1)
				    ? temp : NULL_RTX), 0);

		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	do_pending_stack_adjust ();

	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))

	    /* If the target conflicts with the other operand of the
	       binary op, we can't use it.  Also, we can't use the target
	       if it is a hard register, because evaluating the condition
	       might clobber it.  */
		 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		|| (GET_CODE (temp) == REG
		    && REGNO (temp) < FIRST_PSEUDO_REGISTER))
	      temp = gen_reg_rtx (mode);
	    store_expr (singleton, temp, 0);
	    expand_expr (singleton,
			 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  if (singleton == TREE_OPERAND (exp, 1))
	    jumpif (TREE_OPERAND (exp, 0), op0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	  start_cleanup_deferral ();
	  if (binary_op && temp == 0)
	    /* Just touch the other operand.  */
	    expand_expr (TREE_OPERAND (binary_op, 1),
			 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    store_expr (build (TREE_CODE (binary_op), type,
			       make_tree (type, temp),
			       TREE_OPERAND (binary_op, 1)),
	    store_expr (build1 (TREE_CODE (unary_op), type,
				make_tree (type, temp)),
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
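	/* E.g. for  a > 0 ? a : b  we store A into the output first, jump
	   past the else-arm when the comparison holds, and otherwise
	   overwrite the output with B; cse later merges the two uses of A.  */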
	     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
	     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
	     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				 TREE_OPERAND (exp, 1), 0)
	     && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
	     && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))

	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);

	     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
	     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
	     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				 TREE_OPERAND (exp, 2), 0)
	     && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
	     && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))

	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns.  For
	       example A ? throw : E  */
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_jump_insn (gen_jump (op1));
	    start_cleanup_deferral ();
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which lays down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */
	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;

	if (TREE_CODE (slot) != VAR_DECL)

	    target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	    if (DECL_RTL (slot) != 0)
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, so don't do
		if (TREE_OPERAND (exp, 1) == NULL_TREE)

		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		DECL_RTL (slot) = target;
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL (slot) != 0)
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, so don't do
		if (TREE_OPERAND (exp, 1) == NULL_TREE)

		DECL_RTL (slot) = target;
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot);

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));
	while (noncopied_parts != 0)
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
      /* If lhs is complex, expand calls in rhs before computing it.
	 That's so we don't compute a pointer and save it over a call.
	 If lhs is simple, compute it first so we can give it as a
	 target if the rhs is just a call.  This avoids an extra temp and copy
	 and that prevents a partial-subsumption which makes bad code.
	 Actually we could treat component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	if (TREE_CODE (lhs) != VAR_DECL
	    && TREE_CODE (lhs) != RESULT_DECL
	    && TREE_CODE (lhs) != PARM_DECL
	    && ! (TREE_CODE (lhs) == INDIRECT_REF
		  && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
	  preexpand_calls (exp);
	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))

	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      : integer_zero_node)),
	    do_pending_stack_adjust ();
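	    /* Concretely, for  s.a |= s.b  with both fields one bit wide, the
	       code above jumps over the store when s.b is zero and otherwise
	       assigns the constant 1 to s.a; for  s.a &= s.b  the store of 0
	       is done only when s.b is zero.  */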
8301 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
8302 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
8303 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
8304 TYPE_NONCOPIED_PARTS (lhs_type
));
8306 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8307 while (noncopied_parts
!= 0)
8309 expand_assignment (TREE_PURPOSE (noncopied_parts
),
8310 TREE_VALUE (noncopied_parts
), 0, 0);
8311 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8317 if (!TREE_OPERAND (exp
, 0))
8318 expand_null_return ();
8320 expand_return (TREE_OPERAND (exp
, 0));
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
8333 /* If nonzero, TEMP will be set to the address of something that might
8334 be a MEM corresponding to a stack slot. */
8337 /* Are we taking the address of a nested function? */
8338 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8339 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8340 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8341 && ! TREE_STATIC (exp
))
8343 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8344 op0
= force_operand (op0
, target
);
8346 /* If we are taking the address of something erroneous, just
8348 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8352 /* We make sure to pass const0_rtx down if we came in with
8353 ignore set, to avoid doing the cleanups twice for something. */
8354 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8355 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8356 (modifier
== EXPAND_INITIALIZER
8357 ? modifier
: EXPAND_CONST_ADDRESS
));
8359 /* If we are going to ignore the result, OP0 will have been set
8360 to const0_rtx, so just return it. Don't get confused and
8361 think we are taking the address of the constant. */
8365 op0
= protect_from_queue (op0
, 0);
8367 /* We would like the object in memory. If it is a constant, we can
8368 have it be statically allocated into memory. For a non-constant,
8369 we need to allocate some memory and store the value into it. */
8371 if (CONSTANT_P (op0
))
8372 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8374 else if (GET_CODE (op0
) == MEM
)
8376 mark_temp_addr_taken (op0
);
8377 temp
= XEXP (op0
, 0);
8380 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8381 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
8383 /* If this object is in a register, it must be not
8385 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8386 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8388 mark_temp_addr_taken (memloc
);
8389 emit_move_insn (memloc
, op0
);
8393 if (GET_CODE (op0
) != MEM
)
8396 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8398 temp
= XEXP (op0
, 0);
8399 #ifdef POINTERS_EXTEND_UNSIGNED
8400 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
8401 && mode
== ptr_mode
)
8402 temp
= convert_memory_address (ptr_mode
, temp
);
8407 op0
= force_operand (XEXP (op0
, 0), target
);
8410 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
8411 op0
= force_reg (Pmode
, op0
);
8413 if (GET_CODE (op0
) == REG
8414 && ! REG_USERVAR_P (op0
))
8415 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8417 /* If we might have had a temp slot, add an equivalent address
8420 update_temp_slot_address (temp
, op0
);
8422 #ifdef POINTERS_EXTEND_UNSIGNED
8423 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8424 && mode
== ptr_mode
)
8425 op0
= convert_memory_address (ptr_mode
, op0
);
8430 case ENTRY_VALUE_EXPR
:
8433 /* COMPLEX type for Extended Pascal & Fortran */
8436 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8439 /* Get the rtx code of the operands. */
8440 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8441 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8444 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8448 /* Move the real (op0) and imaginary (op1) parts to their location. */
8449 emit_move_insn (gen_realpart (mode
, target
), op0
);
8450 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8452 insns
= get_insns ();
      /* Complex construction should appear as a single unit.  */
      /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	 each with a separate pseudo as destination.
	 It's not correct for flow to treat them as a unit.  */
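      /* Concretely: building a complex value above emits two moves, one for
	 the real part and one for the imaginary part.  When TARGET is a
	 single register those moves are wrapped below in a no-conflict
	 block so later passes see one combined assignment; when TARGET is a
	 CONCAT of two pseudos each move already stands on its own.  */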
      if (GET_CODE (target) != CONCAT)
	emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8468 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8469 return gen_realpart (mode
, op0
);
8472 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8473 return gen_imagpart (mode
, op0
);
8477 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8481 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8484 target
= gen_reg_rtx (mode
);
8488 /* Store the realpart and the negated imagpart to target. */
8489 emit_move_insn (gen_realpart (partmode
, target
),
8490 gen_realpart (partmode
, op0
));
8492 imag_t
= gen_imagpart (partmode
, target
);
8493 temp
= expand_unop (partmode
, neg_optab
,
8494 gen_imagpart (partmode
, op0
), imag_t
, 0);
8496 emit_move_insn (imag_t
, temp
);
8498 insns
= get_insns ();
8501 /* Conjugate should appear as a single unit
8502 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8503 each with a separate pseudo as destination.
8504 It's not correct for flow to treat them as a unit. */
8505 if (GET_CODE (target
) != CONCAT
)
8506 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8513 case TRY_CATCH_EXPR
:
8515 tree handler
= TREE_OPERAND (exp
, 1);
8517 expand_eh_region_start ();
8519 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8521 expand_eh_region_end (handler
);
8526 case TRY_FINALLY_EXPR
:
8528 tree try_block
= TREE_OPERAND (exp
, 0);
8529 tree finally_block
= TREE_OPERAND (exp
, 1);
8530 rtx finally_label
= gen_label_rtx ();
8531 rtx done_label
= gen_label_rtx ();
8532 rtx return_link
= gen_reg_rtx (Pmode
);
8533 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8534 (tree
) finally_label
, (tree
) return_link
);
8535 TREE_SIDE_EFFECTS (cleanup
) = 1;
8537 /* Start a new binding layer that will keep track of all cleanup
8538 actions to be performed. */
8539 expand_start_bindings (2);
8541 target_temp_slot_level
= temp_slot_level
;
8543 expand_decl_cleanup (NULL_TREE
, cleanup
);
8544 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8546 preserve_temp_slots (op0
);
8547 expand_end_bindings (NULL_TREE
, 0, 0);
8548 emit_jump (done_label
);
8549 emit_label (finally_label
);
8550 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8551 emit_indirect_jump (return_link
);
8552 emit_label (done_label
);
8556 case GOTO_SUBROUTINE_EXPR
:
8558 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8559 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8560 rtx return_address
= gen_label_rtx ();
8561 emit_move_insn (return_link
,
8562 gen_rtx_LABEL_REF (Pmode
, return_address
));
8564 emit_label (return_address
);
8570 rtx dcc
= get_dynamic_cleanup_chain ();
8571 emit_move_insn (dcc
, validize_mem (gen_rtx_MEM (Pmode
, dcc
)));
8577 rtx dhc
= get_dynamic_handler_chain ();
8578 emit_move_insn (dhc
, validize_mem (gen_rtx_MEM (Pmode
, dhc
)));
8583 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8586 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
8589 /* Here to do an ordinary binary operator, generating an instruction
8590 from the optab already placed in `this_optab'. */
8592 preexpand_calls (exp
);
8593 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8595 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8596 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8598 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
8599 unsignedp
, OPTAB_LIB_WIDEN
);
8605 /* Similar to expand_expr, except that we don't specify a target, target
8606 mode, or modifier and we return the alignment of the inner type. This is
8607 used in cases where it is not necessary to align the result to the
8608 alignment of its type as long as we know the alignment of the result, for
8609 example for comparisons of BLKmode values. */
8612 expand_expr_unaligned (exp
, palign
)
8614 unsigned int *palign
;
8617 tree type
= TREE_TYPE (exp
);
8618 register enum machine_mode mode
= TYPE_MODE (type
);
8620 /* Default the alignment we return to that of the type. */
8621 *palign
= TYPE_ALIGN (type
);
8623 /* The only cases in which we do anything special is if the resulting mode
8625 if (mode
!= BLKmode
)
8626 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8628 switch (TREE_CODE (exp
))
8632 case NON_LVALUE_EXPR
:
8633 /* Conversions between BLKmode values don't change the underlying
8634 alignment or value. */
8635 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == BLKmode
)
8636 return expand_expr_unaligned (TREE_OPERAND (exp
, 0), palign
);
8640 /* Much of the code for this case is copied directly from expand_expr.
8641 We need to duplicate it here because we will do something different
8642 in the fall-through case, so we need to handle the same exceptions
8645 tree array
= TREE_OPERAND (exp
, 0);
8646 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
8647 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
8648 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
8651 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
8654 /* Optimize the special-case of a zero lower bound.
8656 We convert the low_bound to sizetype to avoid some problems
8657 with constant folding. (E.g. suppose the lower bound is 1,
8658 and its mode is QI. Without the conversion, (ARRAY
8659 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8660 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8662 if (! integer_zerop (low_bound
))
8663 index
= size_diffop (index
, convert (sizetype
, low_bound
));
8665 /* If this is a constant index into a constant array,
8666 just get the value from the array. Handle both the cases when
8667 we have an explicit constructor and when our operand is a variable
8668 that was declared const. */
8670 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
8671 && 0 > compare_tree_int (index
,
8672 list_length (CONSTRUCTOR_ELTS
8673 (TREE_OPERAND (exp
, 0)))))
8677 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
8678 i
= TREE_INT_CST_LOW (index
);
8679 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
8683 return expand_expr_unaligned (fold (TREE_VALUE (elem
)), palign
);
8686 else if (optimize
>= 1
8687 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
8688 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
8689 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
8691 if (TREE_CODE (index
) == INTEGER_CST
)
8693 tree init
= DECL_INITIAL (array
);
8695 if (TREE_CODE (init
) == CONSTRUCTOR
)
8699 for (elem
= CONSTRUCTOR_ELTS (init
);
8700 ! tree_int_cst_equal (TREE_PURPOSE (elem
), index
);
8701 elem
= TREE_CHAIN (elem
))
8705 return expand_expr_unaligned (fold (TREE_VALUE (elem
)),
8715 /* If the operand is a CONSTRUCTOR, we can just extract the
8716 appropriate field if it is present. Don't do this if we have
8717 already written the data since we want to refer to that copy
8718 and varasm.c assumes that's what we'll do. */
8719 if (TREE_CODE (exp
) != ARRAY_REF
8720 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8721 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
8725 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
8726 elt
= TREE_CHAIN (elt
))
8727 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
8728 /* Note that unlike the case in expand_expr, we know this is
8729 BLKmode and hence not an integer. */
8730 return expand_expr_unaligned (TREE_VALUE (elt
), palign
);
8734 enum machine_mode mode1
;
8735 HOST_WIDE_INT bitsize
, bitpos
;
8738 unsigned int alignment
;
8740 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
8741 &mode1
, &unsignedp
, &volatilep
,
8744 /* If we got back the original object, something is wrong. Perhaps
8745 we are evaluating an expression too early. In any event, don't
8746 infinitely recurse. */
8750 op0
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8752 /* If this is a constant, put it into a register if it is a
8753 legitimate constant and OFFSET is 0 and memory if it isn't. */
8754 if (CONSTANT_P (op0
))
8756 enum machine_mode inner_mode
= TYPE_MODE (TREE_TYPE (tem
));
8758 if (inner_mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
8760 op0
= force_reg (inner_mode
, op0
);
8762 op0
= validize_mem (force_const_mem (inner_mode
, op0
));
8767 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
8769 /* If this object is in a register, put it into memory.
8770 This case can't occur in C, but can in Ada if we have
8771 unchecked conversion of an expression from a scalar type to
8772 an array or record type. */
8773 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8774 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
8776 rtx memloc
= assign_temp (TREE_TYPE (tem
), 1, 1, 1);
8778 mark_temp_addr_taken (memloc
);
8779 emit_move_insn (memloc
, op0
);
8783 if (GET_CODE (op0
) != MEM
)
8786 if (GET_MODE (offset_rtx
) != ptr_mode
)
8788 #ifdef POINTERS_EXTEND_UNSIGNED
8789 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
8791 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
8795 op0
= change_address (op0
, VOIDmode
,
8796 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
8797 force_reg (ptr_mode
,
8801 /* Don't forget about volatility even if this is a bitfield. */
8802 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
8804 op0
= copy_rtx (op0
);
8805 MEM_VOLATILE_P (op0
) = 1;
8808 /* Check the access. */
8809 if (current_function_check_memory_usage
&& GET_CODE (op0
) == MEM
)
8814 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
8815 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
8817 /* Check the access right of the pointer. */
8818 in_check_memory_usage
= 1;
8819 if (size
> BITS_PER_UNIT
)
8820 emit_library_call (chkr_check_addr_libfunc
,
8821 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
8822 to
, ptr_mode
, GEN_INT (size
/ BITS_PER_UNIT
),
8823 TYPE_MODE (sizetype
),
8824 GEN_INT (MEMORY_USE_RO
),
8825 TYPE_MODE (integer_type_node
));
8826 in_check_memory_usage
= 0;
8829 /* In cases where an aligned union has an unaligned object
8830 as a field, we might be extracting a BLKmode value from
8831 an integer-mode (e.g., SImode) object. Handle this case
8832 by doing the extract into an object as wide as the field
8833 (which we know to be the width of a basic mode), then
8834 storing into memory, and changing the mode to BLKmode.
8835 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8836 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8837 if (mode1
== VOIDmode
8838 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8839 || (SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
8840 && (TYPE_ALIGN (type
) > alignment
8841 || bitpos
% TYPE_ALIGN (type
) != 0)))
8843 enum machine_mode ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
8845 if (ext_mode
== BLKmode
)
8847 /* In this case, BITPOS must start at a byte boundary. */
8848 if (GET_CODE (op0
) != MEM
8849 || bitpos
% BITS_PER_UNIT
!= 0)
8852 op0
= change_address (op0
, VOIDmode
,
8853 plus_constant (XEXP (op0
, 0),
8854 bitpos
/ BITS_PER_UNIT
));
8858 rtx
new = assign_stack_temp (ext_mode
,
8859 bitsize
/ BITS_PER_UNIT
, 0);
8861 op0
= extract_bit_field (validize_mem (op0
), bitsize
, bitpos
,
8862 unsignedp
, NULL_RTX
, ext_mode
,
8863 ext_mode
, alignment
,
8864 int_size_in_bytes (TREE_TYPE (tem
)));
8866 /* If the result is a record type and BITSIZE is narrower than
8867 the mode of OP0, an integral mode, and this is a big endian
8868 machine, we must put the field into the high-order bits. */
8869 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
8870 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
8871 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
8872 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
8873 size_int (GET_MODE_BITSIZE
8878 emit_move_insn (new, op0
);
8879 op0
= copy_rtx (new);
8880 PUT_MODE (op0
, BLKmode
);
8884 /* Get a reference to just this component. */
8885 op0
= change_address (op0
, mode1
,
8886 plus_constant (XEXP (op0
, 0),
8887 (bitpos
/ BITS_PER_UNIT
)));
8889 MEM_ALIAS_SET (op0
) = get_alias_set (exp
);
8891 /* Adjust the alignment in case the bit position is not
8892 a multiple of the alignment of the inner object. */
8893 while (bitpos
% alignment
!= 0)
8896 if (GET_CODE (XEXP (op0
, 0)) == REG
)
8897 mark_reg_pointer (XEXP (op0
, 0), alignment
);
8899 MEM_IN_STRUCT_P (op0
) = 1;
8900 MEM_VOLATILE_P (op0
) |= volatilep
;
8902 *palign
= alignment
;
8911 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
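/* For example, if ARG is the PLUS_EXPR  &"hello" + 2,  the STRING_CST for
   "hello" is returned and *PTR_OFFSET is set to the sizetype constant 2;
   a plain  &"hello"  yields the same node with a zero offset.  */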
string_constant (arg, ptr_offset)

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
  else if (TREE_CODE (arg) == PLUS_EXPR)
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

expand_increment (exp, post, ignore)
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
8979 /* Stabilize any component ref that might need to be
8980 evaluated more than once below. */
8982 || TREE_CODE (incremented
) == BIT_FIELD_REF
8983 || (TREE_CODE (incremented
) == COMPONENT_REF
8984 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
8985 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
8986 incremented
= stabilize_reference (incremented
);
8987 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8988 ones into save exprs so that they don't accidentally get evaluated
8989 more than once by the code below. */
8990 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
8991 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
8992 incremented
= save_expr (incremented
);
8994 /* Compute the operands as RTX.
8995 Note whether OP0 is the actual lvalue or a copy of it:
8996 I believe it is a copy iff it is a register or subreg
8997 and insns were generated in computing it. */
8999 temp
= get_last_insn ();
9000 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_RW
);
9002 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9003 in place but instead must do sign- or zero-extension during assignment,
9004 so we copy it into a new register and let the code below use it as
9007 Note that we can safely modify this SUBREG since it is know not to be
9008 shared (it was made by the expand_expr call above). */
9010 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9013 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9017 else if (GET_CODE (op0
) == SUBREG
9018 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9020 /* We cannot increment this SUBREG in place. If we are
9021 post-incrementing, get a copy of the old value. Otherwise,
9022 just mark that we cannot increment in place. */
9024 op0
= copy_to_reg (op0
);
9029 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9030 && temp
!= get_last_insn ());
9031 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
9032 EXPAND_MEMORY_USE_BAD
);
9034 /* Decide whether incrementing or decrementing. */
9035 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9036 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9037 this_optab
= sub_optab
;
9039 /* Convert decrement by a constant into a negative increment. */
9040 if (this_optab
== sub_optab
9041 && GET_CODE (op1
) == CONST_INT
)
9043 op1
= GEN_INT (-INTVAL (op1
));
9044 this_optab
= add_optab
;
9047 /* For a preincrement, see if we can do this with a single instruction. */
9050 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9051 if (icode
!= (int) CODE_FOR_nothing
9052 /* Make sure that OP0 is valid for operands 0 and 1
9053 of the insn we want to queue. */
9054 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9055 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9056 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
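  /* Concretely, for  v++  where V was expanded to a copy (say a promoted
     SUBREG), the code below rebuilds the increment as the assignment
     v = v + 1 and expands that; the copy in OP0 still holds the old value,
     so it can be returned as the postincrement result.  */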
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
9096 /* We have a true reference to the value in OP0.
9097 If there is an insn to add or subtract in this mode, queue it.
9098 Queueing the increment insn avoids the register shuffling
9099 that often results if we must increment now and first save
9100 the old value for subsequent use. */
9102 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9103 op0
= stabilize (op0
);
9106 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9107 if (icode
!= (int) CODE_FOR_nothing
9108 /* Make sure that OP0 is valid for operands 0 and 1
9109 of the insn we want to queue. */
9110 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9111 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9113 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9114 op1
= force_reg (mode
, op1
);
9116 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9118 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9120 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9121 ? force_reg (Pmode
, XEXP (op0
, 0))
9122 : copy_to_reg (XEXP (op0
, 0)));
9125 op0
= change_address (op0
, VOIDmode
, addr
);
9126 temp
= force_reg (GET_MODE (op0
), op0
);
9127 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9128 op1
= force_reg (mode
, op1
);
9130 /* The increment queue is LIFO, thus we have to `queue'
9131 the instructions in reverse order. */
9132 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9133 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9138 /* Preincrement, or we can't increment with one simple insn. */
9140 /* Save a copy of the value before inc or dec, to return it later. */
9141 temp
= value
= copy_to_reg (op0
);
9143 /* Arrange to return the incremented value. */
9144 /* Copy the rtx because expand_binop will protect from the queue,
9145 and the results of that would be invalid for us to return
9146 if our caller does emit_queue before using our result. */
9147 temp
= copy_rtx (value
= op0
);
9149 /* Increment however we can. */
9150 op1
= expand_binop (mode
, this_optab
, value
, op1
,
9151 current_function_check_memory_usage
? NULL_RTX
: op0
,
9152 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9153 /* Make sure the value is stored into OP0. */
9155 emit_move_insn (op0
, op1
);
9160 /* Expand all function calls contained within EXP, innermost ones first.
9161 But don't look within expressions that have sequence points.
9162 For each CALL_EXPR, record the rtx for its value
9163 in the CALL_EXPR_RTL field. */
9166 preexpand_calls (exp
)
9169 register int nops
, i
;
9170 int class = TREE_CODE_CLASS (TREE_CODE (exp
));
9172 if (! do_preexpand_calls
)
9175 /* Only expressions and references can contain calls. */
9177 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9180 switch (TREE_CODE (exp
))
9183 /* Do nothing if already expanded. */
9184 if (CALL_EXPR_RTL (exp
) != 0
9185 /* Do nothing if the call returns a variable-sized object. */
9186 || (TREE_CODE (TREE_TYPE (exp
)) != VOID_TYPE
9187 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
9188 /* Do nothing to built-in functions. */
9189 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
9190 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
9192 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
9195 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
9200 case TRUTH_ANDIF_EXPR
:
9201 case TRUTH_ORIF_EXPR
:
9202 /* If we find one of these, then we can be sure
9203 the adjust will be done for it (since it makes jumps).
9204 Do it now, so that if this is inside an argument
9205 of a function, we don't get the stack adjustment
9206 after some other args have already been pushed. */
9207 do_pending_stack_adjust ();
9212 case WITH_CLEANUP_EXPR
:
9213 case CLEANUP_POINT_EXPR
:
9214 case TRY_CATCH_EXPR
:
9218 if (SAVE_EXPR_RTL (exp
) != 0)
9225 nops
= TREE_CODE_LENGTH (TREE_CODE (exp
));
9226 for (i
= 0; i
< nops
; i
++)
9227 if (TREE_OPERAND (exp
, i
) != 0)
9229 if (TREE_CODE (exp
) == TARGET_EXPR
&& i
== 2)
9230 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9231 It doesn't happen before the call is made. */
9235 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
9236 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9237 preexpand_calls (TREE_OPERAND (exp
, i
));
9242 /* At the start of a function, record that we have no previously-pushed
9243 arguments waiting to be popped. */
9246 init_pending_stack_adjust ()
9248 pending_stack_adjust
= 0;
9251 /* When exiting from function, if safe, clear out any pending stack adjust
9252 so the adjustment won't get done.
9254 Note, if the current function calls alloca, then it must have a
9255 frame pointer regardless of the value of flag_omit_frame_pointer. */
9258 clear_pending_stack_adjust ()
9260 #ifdef EXIT_IGNORE_STACK
9262 && (! flag_omit_frame_pointer
|| current_function_calls_alloca
)
9263 && EXIT_IGNORE_STACK
9264 && ! (DECL_INLINE (current_function_decl
) && ! flag_no_inline
)
9265 && ! flag_inline_functions
)
9267 stack_pointer_delta
-= pending_stack_adjust
,
9268 pending_stack_adjust
= 0;
9273 /* Pop any previously-pushed arguments that have not been popped yet. */
9276 do_pending_stack_adjust ()
9278 if (inhibit_defer_pop
== 0)
9280 if (pending_stack_adjust
!= 0)
9281 adjust_stack (GEN_INT (pending_stack_adjust
));
9282 pending_stack_adjust
= 0;
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the

jumpifnot (exp, label)
  do_jump (exp, label, NULL_RTX);

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

  do_jump (exp, NULL_RTX, label);

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
9323 do_jump (exp
, if_false_label
, if_true_label
)
9325 rtx if_false_label
, if_true_label
;
9327 register enum tree_code code
= TREE_CODE (exp
);
9328 /* Some cases need to create a label to jump to
9329 in order to properly fall through.
9330 These cases set DROP_THROUGH_LABEL nonzero. */
9331 rtx drop_through_label
= 0;
9335 enum machine_mode mode
;
9337 #ifdef MAX_INTEGER_COMPUTATION_MODE
9338 check_max_integer_computation_mode (exp
);
9349 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9355 /* This is not true with #pragma weak */
9357 /* The address of something can never be zero. */
9359 emit_jump (if_true_label
);
9364 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9365 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9366 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
9369 /* If we are narrowing the operand, we have to do the compare in the
9371 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9372 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9374 case NON_LVALUE_EXPR
:
9375 case REFERENCE_EXPR
:
9380 /* These cannot change zero->non-zero or vice versa. */
9381 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9384 case WITH_RECORD_EXPR
:
9385 /* Put the object on the placeholder list, recurse through our first
9386 operand, and pop the list. */
9387 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9389 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9390 placeholder_list
= TREE_CHAIN (placeholder_list
);
9394 /* This is never less insns than evaluating the PLUS_EXPR followed by
9395 a test and can be longer if the test is eliminated. */
9397 /* Reduce to minus. */
9398 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9399 TREE_OPERAND (exp
, 0),
9400 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9401 TREE_OPERAND (exp
, 1))));
9402 /* Process as MINUS. */
9406 /* Non-zero iff operands of minus differ. */
9407 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9408 TREE_OPERAND (exp
, 0),
9409 TREE_OPERAND (exp
, 1)),
9410 NE
, NE
, if_false_label
, if_true_label
);
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
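      /* E.g. for  (x & 0x80) != 0  with X a full-width int, I below is 7,
	 so the AND and the comparison are redone in the 8-bit integer mode
	 returned by mode_for_size, turning a full-width test into a test of
	 the top bit of a single byte.  */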
      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	  do_jump (convert (type, exp), if_false_label, if_true_label);
9436 case TRUTH_NOT_EXPR
:
9437 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9440 case TRUTH_ANDIF_EXPR
:
9441 if (if_false_label
== 0)
9442 if_false_label
= drop_through_label
= gen_label_rtx ();
9443 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9444 start_cleanup_deferral ();
9445 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9446 end_cleanup_deferral ();
9449 case TRUTH_ORIF_EXPR
:
9450 if (if_true_label
== 0)
9451 if_true_label
= drop_through_label
= gen_label_rtx ();
9452 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9453 start_cleanup_deferral ();
9454 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9455 end_cleanup_deferral ();
9460 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9461 preserve_temp_slots (NULL_RTX
);
9465 do_pending_stack_adjust ();
9466 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9473 HOST_WIDE_INT bitsize
, bitpos
;
9475 enum machine_mode mode
;
9479 unsigned int alignment
;
9481 /* Get description of this reference. We don't actually care
9482 about the underlying object here. */
9483 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9484 &unsignedp
, &volatilep
, &alignment
);
9486 type
= type_for_size (bitsize
, unsignedp
);
9487 if (! SLOW_BYTE_ACCESS
9488 && type
!= 0 && bitsize
>= 0
9489 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9490 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9491 != CODE_FOR_nothing
))
9493 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9500 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9501 if (integer_onep (TREE_OPERAND (exp
, 1))
9502 && integer_zerop (TREE_OPERAND (exp
, 2)))
9503 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9505 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9506 && integer_onep (TREE_OPERAND (exp
, 2)))
9507 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9511 register rtx label1
= gen_label_rtx ();
9512 drop_through_label
= gen_label_rtx ();
9514 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9516 start_cleanup_deferral ();
9517 /* Now the THEN-expression. */
9518 do_jump (TREE_OPERAND (exp
, 1),
9519 if_false_label
? if_false_label
: drop_through_label
,
9520 if_true_label
? if_true_label
: drop_through_label
);
9521 /* In case the do_jump just above never jumps. */
9522 do_pending_stack_adjust ();
9523 emit_label (label1
);
9525 /* Now the ELSE-expression. */
9526 do_jump (TREE_OPERAND (exp
, 2),
9527 if_false_label
? if_false_label
: drop_through_label
,
9528 if_true_label
? if_true_label
: drop_through_label
);
9529 end_cleanup_deferral ();
9535 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9537 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9538 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9540 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9541 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9544 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9545 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9546 fold (build1 (REALPART_EXPR
,
9547 TREE_TYPE (inner_type
),
9549 fold (build1 (REALPART_EXPR
,
9550 TREE_TYPE (inner_type
),
9552 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9553 fold (build1 (IMAGPART_EXPR
,
9554 TREE_TYPE (inner_type
),
9556 fold (build1 (IMAGPART_EXPR
,
9557 TREE_TYPE (inner_type
),
9559 if_false_label
, if_true_label
);
9562 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9563 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9565 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9566 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9567 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9569 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9575 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9577 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9578 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9580 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9581 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9584 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9585 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9586 fold (build1 (REALPART_EXPR
,
9587 TREE_TYPE (inner_type
),
9589 fold (build1 (REALPART_EXPR
,
9590 TREE_TYPE (inner_type
),
9592 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9593 fold (build1 (IMAGPART_EXPR
,
9594 TREE_TYPE (inner_type
),
9596 fold (build1 (IMAGPART_EXPR
,
9597 TREE_TYPE (inner_type
),
9599 if_false_label
, if_true_label
);
9602 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9603 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9605 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9606 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9607 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9609 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
9614 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9615 if (GET_MODE_CLASS (mode
) == MODE_INT
9616 && ! can_compare_p (LT
, mode
, ccp_jump
))
9617 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9619 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
9623 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9624 if (GET_MODE_CLASS (mode
) == MODE_INT
9625 && ! can_compare_p (LE
, mode
, ccp_jump
))
9626 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9628 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9632 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9633 if (GET_MODE_CLASS (mode
) == MODE_INT
9634 && ! can_compare_p (GT
, mode
, ccp_jump
))
9635 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9637 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9641 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9642 if (GET_MODE_CLASS (mode
) == MODE_INT
9643 && ! can_compare_p (GE
, mode
, ccp_jump
))
9644 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9646 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9649 case UNORDERED_EXPR
:
9652 enum rtx_code cmp
, rcmp
;
9655 if (code
== UNORDERED_EXPR
)
9656 cmp
= UNORDERED
, rcmp
= ORDERED
;
9658 cmp
= ORDERED
, rcmp
= UNORDERED
;
9659 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9662 if (! can_compare_p (cmp
, mode
, ccp_jump
)
9663 && (can_compare_p (rcmp
, mode
, ccp_jump
)
9664 /* If the target doesn't provide either UNORDERED or ORDERED
9665 comparisons, canonicalize on UNORDERED for the library. */
9666 || rcmp
== UNORDERED
))
9670 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
9672 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
9677 enum rtx_code rcode1
;
9678 enum tree_code tcode2
;
9702 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9703 if (can_compare_p (rcode1
, mode
, ccp_jump
))
9704 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
	    tree op1 = save_expr (TREE_OPERAND (exp, 1));

	    /* If the target doesn't support combined unordered
	       compares, decompose into UNORDERED + comparison.  */
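	    /* For instance, a test that should hold when the operands are
	       unordered or OP0 < OP1 becomes  UNORDERED (op0, op1) || op0 < op1,
	       expanded through the TRUTH_ORIF_EXPR built just below.  */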
	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	    do_jump (exp, if_false_label, if_true_label);
9724 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
9726 /* This is not needed any more and causes poor code since it causes
9727 comparisons and tests from non-SI objects to have different code
9729 /* Copy to register to avoid generating bad insns by cse
9730 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9731 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9732 temp
= copy_to_reg (temp
);
9734 do_pending_stack_adjust ();
9735 /* Do any postincrements in the expression that was tested. */
9738 if (GET_CODE (temp
) == CONST_INT
|| GET_CODE (temp
) == LABEL_REF
)
9740 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
9744 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
9745 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
9746 /* Note swapping the labels gives us not-equal. */
9747 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
9748 else if (GET_MODE (temp
) != VOIDmode
)
9749 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
9750 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9751 GET_MODE (temp
), NULL_RTX
, 0,
9752 if_false_label
, if_true_label
);
9757 if (drop_through_label
)
9759 /* If do_jump produces code that might be jumped around,
9760 do any stack adjusts from that code, before the place
9761 where control merges in. */
9762 do_pending_stack_adjust ();
9763 emit_label (drop_through_label
);
9767 /* Given a comparison expression EXP for values too wide to be compared
9768 with one insn, test the comparison and jump to the appropriate label.
9769 The code of EXP is ignored; we always test GT if SWAP is 0,
9770 and LT if SWAP is 1. */
9773 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
9776 rtx if_false_label
, if_true_label
;
9778 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
9779 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
9780 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9781 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9783 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
);
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
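/* For example, comparing two double-word values on a 32-bit target compares
   the high-order words first; only when those are equal does control fall
   through to the remaining words, and every word below the most significant
   one is compared unsigned.  */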
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     rtx if_false_label, if_true_label;

  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX, 0,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0, NULL_RTX, if_false_label);

  emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, 0, if_false_label,
			     NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
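
/* An illustrative, stand-alone sketch (not part of the compiler) of the
   control flow emitted above for a multiword equality: any pair of words
   that differs jumps straight to the false label; only if every pair
   matches does control reach the jump to the true label.  The helper name
   and word type are assumptions made for the example.  */
#if 0
static int
example_wide_eq (const unsigned long *op0, const unsigned long *op1,
		 int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    if (op0[i] != op1[i])
      return 0;			/* the per-word jump to if_false_label */

  return 1;			/* the final jump to if_true_label */
}
#endif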
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
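
/* An illustrative, stand-alone sketch (not part of the compiler) of the
   OR-reduction preferred above: a multiword value is zero exactly when the
   inclusive-OR of all its words is zero, so one running OR plus a single
   compare replaces a chain of per-word compares.  The helper name and word
   type are assumptions made for the example.  */
#if 0
static int
example_wide_is_zero (const unsigned long *op0, int nwords)
{
  unsigned long part = op0[0];
  int i;

  for (i = 1; i < nwords; i++)
    part |= op0[i];	/* one IOR per additional word, as expand_binop emits */

  /* The whole value is zero iff the OR of its words is zero.  */
  return part == 0;
}
#endif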
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));

      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
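
/* An illustrative, stand-alone sketch (not part of the compiler) of the idea
   in the disabled block above: a signed EQ/NE can be carried out as an
   unsigned comparison, provided a constant operand is first masked down to
   the bit pattern it has in the narrower mode.  The example models a 16-bit
   mode with plain C types; the helper name and types are assumptions made
   for the example.  */
#if 0
static int
example_signed_eq_as_unsigned (short op0, long op1)
{
  /* GET_MODE_MASK for a 16-bit mode.  */
  unsigned long mode_mask = 0xffffUL;

  /* Mask the constant-like operand to what it looks like unsigned in the
     16-bit mode: e.g. -1 becomes 0xffff.  */
  unsigned long u_op1 = (unsigned long) op1 & mode_mask;
  unsigned long u_op0 = (unsigned long) (unsigned short) op0;

  /* The zero-extended values are equal exactly when the original 16-bit
     values are equal as signed quantities.  */
  return u_op0 == u_op1;
}
#endif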
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));

      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
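
/* An illustrative, stand-alone sketch (not part of the compiler) of why the
   code above reverses the condition only for non-floating modes: with IEEE
   NaNs, "not (a < b)" is not the same as "a >= b", so jumping to the false
   label on the reversed condition would be wrong for floats, while integer
   comparisons have no unordered outcome.  The helper name is an assumption
   made for the example.  */
#if 0
static int
example_float_reverse_mismatch (double a, double b)
{
  /* When either operand is a NaN, (a < b) is false, so !(a < b) is true,
     yet (a >= b) is also false; the two differ exactly in that case, and
     this returns 1.  For ordinary values it returns 0.  */
  return (!(a < b)) != (a >= b);
}
#endif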
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
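
/* An illustrative, stand-alone sketch (not part of the compiler) of the
   single-bit transformation performed in do_store_flag above: a test of
   "(x & (1 << bitnum)) != 0" (or "== 0") is computed by shifting the tested
   bit into the low-order position, XOR-ing with 1 when the original
   condition was EQ, and masking with 1, so no store-flag (scc) instruction
   is needed.  The helper name and the use of `unsigned int' are assumptions
   made for the example.  */
#if 0
static unsigned int
example_single_bit_test (unsigned int x, int bitnum, int test_is_eq)
{
  unsigned int result = x >> bitnum;	/* bring the tested bit to position 0 */

  if (test_is_eq)
    result ^= 1;			/* invert: EQ means "bit is clear" */

  return result & 1;			/* mask down to a 0/1 flag */
}
#endif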
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */
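
/* An illustrative, stand-alone sketch (not part of the compiler) of the
   single unsigned range check used in do_tablejump above: once the lower
   bound has been subtracted from the switch index, one unsigned
   "greater than range" test rejects both indices below the minimum (they
   wrap around to huge unsigned values) and indices above the maximum.  The
   helper name and types are assumptions made for the example.  */
#if 0
static int
example_switch_in_range (long index, long low, long high)
{
  unsigned long biased = (unsigned long) index - (unsigned long) low;
  unsigned long range = (unsigned long) high - (unsigned long) low;

  /* The compiler emits the complementary GTU jump to default_label;
     this returns 1 exactly when low <= index <= high.  */
  return biased <= range;
}
#endif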