/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;
static rtx get_push_address	PARAMS ((int));
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					      HOST_WIDE_INT, enum machine_mode,
					      tree, tree, unsigned int, int,
					      int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
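
/* Illustrative sketch (not part of GCC): how a block-copy expander is
   expected to consult MOVE_BY_PIECES_P before falling back on a movstr
   pattern or a library call.  The helper name `expand_small_copy' and the
   32-byte constant below are invented for the example; MOVE_BY_PIECES_P,
   move_by_pieces and emit_block_move are the real interfaces.  */
#if 0
static void
expand_small_copy (dst, src, nbytes, align)
     rtx dst, src;			/* BLKmode MEMs */
     unsigned HOST_WIDE_INT nbytes;
     unsigned int align;		/* alignment in bits */
{
  if (MOVE_BY_PIECES_P (nbytes, align))
    /* Few enough insns: emit individual scalar moves.  */
    move_by_pieces (dst, src, nbytes, align);
  else
    /* Otherwise let emit_block_move pick a movstr insn or call memcpy.  */
    emit_block_move (dst, src, GEN_INT (nbytes), align);
}
#endif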
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	       && (direct_load[(int) mode] == 0
		   || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_COPY_ATTRIBUTES (new, x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
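
/* Illustrative sketch (not part of GCC): the intended calling discipline
   for the queue above.  Expansion code protects every possibly-QUEUED
   operand immediately before emitting an insn that uses it, and flushes
   the queue at a statement boundary.  The function name is invented;
   protect_from_queue, emit_move_insn and emit_queue are the real API.  */
#if 0
static void
example_use_of_queue (dest, src)
     rtx dest, src;
{
  /* SRC may be a QUEUED left behind by a post-increment; get an rtx that
     is safe to put into an instruction right now.  */
  src = protect_from_queue (src, 0);
  dest = protect_from_queue (dest, 1);
  emit_move_insn (dest, src);

  /* At the end of the statement, emit the queued increments.  */
  emit_queue ();
}
#endif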
487 /* Copy data from FROM to TO, where the machine modes are not the same.
488 Both modes may be integer, or both may be floating.
489 UNSIGNEDP should be nonzero if FROM is an unsigned type.
490 This causes zero-extension instead of sign-extension. */
493 convert_move (to
, from
, unsignedp
)
494 register rtx to
, from
;
497 enum machine_mode to_mode
= GET_MODE (to
);
498 enum machine_mode from_mode
= GET_MODE (from
);
499 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
500 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
504 /* rtx code for making an equivalent value. */
505 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
507 to
= protect_from_queue (to
, 1);
508 from
= protect_from_queue (from
, 0);
510 if (to_real
!= from_real
)
513 /* If FROM is a SUBREG that indicates that we have already done at least
514 the required extension, strip it. We don't handle such SUBREGs as
517 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
518 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
519 >= GET_MODE_SIZE (to_mode
))
520 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
521 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
523 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
526 if (to_mode
== from_mode
527 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
529 emit_move_insn (to
, from
);
533 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
535 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
538 if (VECTOR_MODE_P (to_mode
))
539 from
= gen_rtx_SUBREG (to_mode
, from
, 0);
541 to
= gen_rtx_SUBREG (from_mode
, to
, 0);
543 emit_move_insn (to
, from
);
547 if (to_real
!= from_real
)
554 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
556 /* Try converting directly if the insn is supported. */
557 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
560 emit_unop_insn (code
, to
, from
, UNKNOWN
);
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
568 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
572 #ifdef HAVE_trunctqfqf2
573 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
575 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
579 #ifdef HAVE_truncsfqf2
580 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
582 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
586 #ifdef HAVE_truncdfqf2
587 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
589 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
593 #ifdef HAVE_truncxfqf2
594 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
596 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
600 #ifdef HAVE_trunctfqf2
601 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
603 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
608 #ifdef HAVE_trunctqfhf2
609 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
611 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
615 #ifdef HAVE_truncsfhf2
616 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
618 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
622 #ifdef HAVE_truncdfhf2
623 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
625 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
629 #ifdef HAVE_truncxfhf2
630 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
632 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
636 #ifdef HAVE_trunctfhf2
637 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
639 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
644 #ifdef HAVE_truncsftqf2
645 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
647 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
651 #ifdef HAVE_truncdftqf2
652 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
654 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
658 #ifdef HAVE_truncxftqf2
659 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
661 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
665 #ifdef HAVE_trunctftqf2
666 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
668 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
673 #ifdef HAVE_truncdfsf2
674 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
676 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
680 #ifdef HAVE_truncxfsf2
681 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
683 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
687 #ifdef HAVE_trunctfsf2
688 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
690 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
694 #ifdef HAVE_truncxfdf2
695 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
697 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
701 #ifdef HAVE_trunctfdf2
702 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
704 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
716 libcall
= extendsfdf2_libfunc
;
720 libcall
= extendsfxf2_libfunc
;
724 libcall
= extendsftf2_libfunc
;
736 libcall
= truncdfsf2_libfunc
;
740 libcall
= extenddfxf2_libfunc
;
744 libcall
= extenddftf2_libfunc
;
756 libcall
= truncxfsf2_libfunc
;
760 libcall
= truncxfdf2_libfunc
;
772 libcall
= trunctfsf2_libfunc
;
776 libcall
= trunctfdf2_libfunc
;
788 if (libcall
== (rtx
) 0)
789 /* This conversion is not implemented yet. */
793 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
795 insns
= get_insns ();
797 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
802 /* Now both modes are integers. */
804 /* Handle expanding beyond a word. */
805 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
806 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
813 enum machine_mode lowpart_mode
;
814 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
816 /* Try converting directly if the insn is supported. */
817 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
820 /* If FROM is a SUBREG, put it into a register. Do this
821 so that we always generate the same set of insns for
822 better cse'ing; if an intermediate assignment occurred,
823 we won't be doing the operation directly on the SUBREG. */
824 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
825 from
= force_reg (from_mode
, from
);
826 emit_unop_insn (code
, to
, from
, equiv_code
);
829 /* Next, try converting via full word. */
830 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
831 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
832 != CODE_FOR_nothing
))
834 if (GET_CODE (to
) == REG
)
835 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
836 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
837 emit_unop_insn (code
, to
,
838 gen_lowpart (word_mode
, to
), equiv_code
);
842 /* No special multiword conversion insn; do it by hand. */
845 /* Since we will turn this into a no conflict block, we must ensure
846 that the source does not overlap the target. */
848 if (reg_overlap_mentioned_p (to
, from
))
849 from
= force_reg (from_mode
, from
);
851 /* Get a copy of FROM widened to a word, if necessary. */
852 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
853 lowpart_mode
= word_mode
;
855 lowpart_mode
= from_mode
;
857 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
859 lowpart
= gen_lowpart (lowpart_mode
, to
);
860 emit_move_insn (lowpart
, lowfrom
);
862 /* Compute the value to put in each remaining word. */
864 fill_value
= const0_rtx
;
869 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
870 && STORE_FLAG_VALUE
== -1)
872 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
874 fill_value
= gen_reg_rtx (word_mode
);
875 emit_insn (gen_slt (fill_value
));
881 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
882 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
884 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
888 /* Fill the remaining words. */
889 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
891 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
892 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
897 if (fill_value
!= subword
)
898 emit_move_insn (subword
, fill_value
);
901 insns
= get_insns ();
904 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
905 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
909 /* Truncating multi-word to a word or less. */
910 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
911 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
913 if (!((GET_CODE (from
) == MEM
914 && ! MEM_VOLATILE_P (from
)
915 && direct_load
[(int) to_mode
]
916 && ! mode_dependent_address_p (XEXP (from
, 0)))
917 || GET_CODE (from
) == REG
918 || GET_CODE (from
) == SUBREG
))
919 from
= force_reg (from_mode
, from
);
920 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
924 /* Handle pointer conversion. */ /* SPEE 900220. */
925 if (to_mode
== PQImode
)
927 if (from_mode
!= QImode
)
928 from
= convert_to_mode (QImode
, from
, unsignedp
);
930 #ifdef HAVE_truncqipqi2
931 if (HAVE_truncqipqi2
)
933 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
936 #endif /* HAVE_truncqipqi2 */
940 if (from_mode
== PQImode
)
942 if (to_mode
!= QImode
)
944 from
= convert_to_mode (QImode
, from
, unsignedp
);
949 #ifdef HAVE_extendpqiqi2
950 if (HAVE_extendpqiqi2
)
952 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
955 #endif /* HAVE_extendpqiqi2 */
960 if (to_mode
== PSImode
)
962 if (from_mode
!= SImode
)
963 from
= convert_to_mode (SImode
, from
, unsignedp
);
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2
)
968 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
971 #endif /* HAVE_truncsipsi2 */
975 if (from_mode
== PSImode
)
977 if (to_mode
!= SImode
)
979 from
= convert_to_mode (SImode
, from
, unsignedp
);
984 #ifdef HAVE_extendpsisi2
985 if (! unsignedp
&& HAVE_extendpsisi2
)
987 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
990 #endif /* HAVE_extendpsisi2 */
991 #ifdef HAVE_zero_extendpsisi2
992 if (unsignedp
&& HAVE_zero_extendpsisi2
)
994 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
997 #endif /* HAVE_zero_extendpsisi2 */
1002 if (to_mode
== PDImode
)
1004 if (from_mode
!= DImode
)
1005 from
= convert_to_mode (DImode
, from
, unsignedp
);
1007 #ifdef HAVE_truncdipdi2
1008 if (HAVE_truncdipdi2
)
1010 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1013 #endif /* HAVE_truncdipdi2 */
1017 if (from_mode
== PDImode
)
1019 if (to_mode
!= DImode
)
1021 from
= convert_to_mode (DImode
, from
, unsignedp
);
1026 #ifdef HAVE_extendpdidi2
1027 if (HAVE_extendpdidi2
)
1029 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1032 #endif /* HAVE_extendpdidi2 */
1037 /* Now follow all the conversions between integers
1038 no more than a word long. */
1040 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1041 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1042 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1043 GET_MODE_BITSIZE (from_mode
)))
1045 if (!((GET_CODE (from
) == MEM
1046 && ! MEM_VOLATILE_P (from
)
1047 && direct_load
[(int) to_mode
]
1048 && ! mode_dependent_address_p (XEXP (from
, 0)))
1049 || GET_CODE (from
) == REG
1050 || GET_CODE (from
) == SUBREG
))
1051 from
= force_reg (from_mode
, from
);
1052 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1053 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1054 from
= copy_to_reg (from
);
1055 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1059 /* Handle extension. */
1060 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1062 /* Convert directly if that works. */
1063 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1064 != CODE_FOR_nothing
)
1066 emit_unop_insn (code
, to
, from
, equiv_code
);
1071 enum machine_mode intermediate
;
1075 /* Search for a mode to convert via. */
1076 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1077 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1078 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1079 != CODE_FOR_nothing
)
1080 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1081 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1082 GET_MODE_BITSIZE (intermediate
))))
1083 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1084 != CODE_FOR_nothing
))
1086 convert_move (to
, convert_to_mode (intermediate
, from
,
1087 unsignedp
), unsignedp
);
1091 /* No suitable intermediate mode.
1092 Generate what we need with shifts. */
1093 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1094 - GET_MODE_BITSIZE (from_mode
), 0);
1095 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1096 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1098 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1101 emit_move_insn (to
, tmp
);
1106 /* Support special truncate insns for certain modes. */
1108 if (from_mode
== DImode
&& to_mode
== SImode
)
1110 #ifdef HAVE_truncdisi2
1111 if (HAVE_truncdisi2
)
1113 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1117 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1121 if (from_mode
== DImode
&& to_mode
== HImode
)
1123 #ifdef HAVE_truncdihi2
1124 if (HAVE_truncdihi2
)
1126 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1130 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1134 if (from_mode
== DImode
&& to_mode
== QImode
)
1136 #ifdef HAVE_truncdiqi2
1137 if (HAVE_truncdiqi2
)
1139 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1143 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1147 if (from_mode
== SImode
&& to_mode
== HImode
)
1149 #ifdef HAVE_truncsihi2
1150 if (HAVE_truncsihi2
)
1152 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1156 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1160 if (from_mode
== SImode
&& to_mode
== QImode
)
1162 #ifdef HAVE_truncsiqi2
1163 if (HAVE_truncsiqi2
)
1165 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1169 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1173 if (from_mode
== HImode
&& to_mode
== QImode
)
1175 #ifdef HAVE_trunchiqi2
1176 if (HAVE_trunchiqi2
)
1178 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1182 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1186 if (from_mode
== TImode
&& to_mode
== DImode
)
1188 #ifdef HAVE_trunctidi2
1189 if (HAVE_trunctidi2
)
1191 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1195 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1199 if (from_mode
== TImode
&& to_mode
== SImode
)
1201 #ifdef HAVE_trunctisi2
1202 if (HAVE_trunctisi2
)
1204 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1208 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1212 if (from_mode
== TImode
&& to_mode
== HImode
)
1214 #ifdef HAVE_trunctihi2
1215 if (HAVE_trunctihi2
)
1217 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1221 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1225 if (from_mode
== TImode
&& to_mode
== QImode
)
1227 #ifdef HAVE_trunctiqi2
1228 if (HAVE_trunctiqi2
)
1230 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1234 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1238 /* Handle truncation of volatile memrefs, and so on;
1239 the things that couldn't be truncated directly,
1240 and for which there was no special instruction. */
1241 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1243 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1244 emit_move_insn (to
, temp
);
1248 /* Mode combination is not recognized. */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
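
/* Illustrative sketch (not part of GCC): a typical use of convert_to_mode
   when an expander has an SImode value but needs it in DImode.  The
   function name is invented for the example; convert_to_mode either
   reuses OP in the wider mode or builds a new pseudo via convert_move,
   zero- or sign-extending according to UNSIGNEDP.  */
#if 0
static rtx
example_widen_to_di (op, unsignedp)
     rtx op;		/* an SImode value */
     int unsignedp;
{
  return convert_to_mode (DImode, op, unsignedp);
}
#endif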
1271 /* Return an rtx for a value that would result
1272 from converting X from mode OLDMODE to mode MODE.
1273 Both modes may be floating, or both integer.
1274 UNSIGNEDP is nonzero if X is an unsigned value.
1276 This can be done by referring to a part of X in place
1277 or by copying to a new temporary with conversion.
1279 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1281 This function *must not* call protect_from_queue
1282 except when putting X into an insn (in which case convert_move does it). */
1285 convert_modes (mode
, oldmode
, x
, unsignedp
)
1286 enum machine_mode mode
, oldmode
;
1292 /* If FROM is a SUBREG that indicates that we have already done at least
1293 the required extension, strip it. */
1295 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1296 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1297 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1298 x
= gen_lowpart (mode
, x
);
1300 if (GET_MODE (x
) != VOIDmode
)
1301 oldmode
= GET_MODE (x
);
1303 if (mode
== oldmode
)
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
1312 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1313 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1314 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1316 HOST_WIDE_INT val
= INTVAL (x
);
1318 if (oldmode
!= VOIDmode
1319 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1321 int width
= GET_MODE_BITSIZE (oldmode
);
1323 /* We need to zero extend VAL. */
1324 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1327 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1330 /* We can do this with a gen_lowpart if both desired and current modes
1331 are integer, and this is either a constant integer, a register, or a
1332 non-volatile MEM. Except for the constant case where MODE is no
1333 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1335 if ((GET_CODE (x
) == CONST_INT
1336 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1337 || (GET_MODE_CLASS (mode
) == MODE_INT
1338 && GET_MODE_CLASS (oldmode
) == MODE_INT
1339 && (GET_CODE (x
) == CONST_DOUBLE
1340 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1341 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1342 && direct_load
[(int) mode
])
1343 || (GET_CODE (x
) == REG
1344 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1345 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1347 /* ?? If we don't know OLDMODE, we have to assume here that
1348 X does not need sign- or zero-extension. This may not be
1349 the case, but it's the best we can do. */
1350 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1351 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1353 HOST_WIDE_INT val
= INTVAL (x
);
1354 int width
= GET_MODE_BITSIZE (oldmode
);
1356 /* We must sign or zero-extend in this case. Start by
1357 zero-extending, then sign extend if we need to. */
1358 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1360 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1361 val
|= (HOST_WIDE_INT
) (-1) << width
;
1363 return GEN_INT (trunc_int_for_mode (val
, mode
));
1366 return gen_lowpart (mode
, x
);
1369 temp
= gen_reg_rtx (mode
);
1370 convert_move (temp
, x
, unsignedp
);
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.

   When TO is NULL, emit_single_push_insn is used to push FROM
   onto the stack instead.

   ALIGN is maximum alignment we can assume.  */
1396 move_by_pieces (to
, from
, len
, align
)
1398 unsigned HOST_WIDE_INT len
;
1401 struct move_by_pieces data
;
1402 rtx to_addr
, from_addr
= XEXP (from
, 0);
1403 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1404 enum machine_mode mode
= VOIDmode
, tmode
;
1405 enum insn_code icode
;
1408 data
.from_addr
= from_addr
;
1411 to_addr
= XEXP (to
, 0);
1414 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1415 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1417 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1424 #ifdef STACK_GROWS_DOWNWARD
1430 data
.to_addr
= to_addr
;
1433 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1434 || GET_CODE (from_addr
) == POST_INC
1435 || GET_CODE (from_addr
) == POST_DEC
);
1437 data
.explicit_inc_from
= 0;
1438 data
.explicit_inc_to
= 0;
1439 if (data
.reverse
) data
.offset
= len
;
1442 /* If copying requires more than two move insns,
1443 copy addresses to registers (to make displacements shorter)
1444 and use post-increment if available. */
1445 if (!(data
.autinc_from
&& data
.autinc_to
)
1446 && move_by_pieces_ninsns (len
, align
) > 2)
1448 /* Find the mode of the largest move... */
1449 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1450 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1451 if (GET_MODE_SIZE (tmode
) < max_size
)
1454 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1456 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1457 data
.autinc_from
= 1;
1458 data
.explicit_inc_from
= -1;
1460 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1462 data
.from_addr
= copy_addr_to_reg (from_addr
);
1463 data
.autinc_from
= 1;
1464 data
.explicit_inc_from
= 1;
1466 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1467 data
.from_addr
= copy_addr_to_reg (from_addr
);
1468 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1470 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1472 data
.explicit_inc_to
= -1;
1474 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1476 data
.to_addr
= copy_addr_to_reg (to_addr
);
1478 data
.explicit_inc_to
= 1;
1480 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1481 data
.to_addr
= copy_addr_to_reg (to_addr
);
1484 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1485 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1486 align
= MOVE_MAX
* BITS_PER_UNIT
;
1488 /* First move what we can in the largest integer mode, then go to
1489 successively smaller modes. */
1491 while (max_size
> 1)
1493 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1494 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1495 if (GET_MODE_SIZE (tmode
) < max_size
)
1498 if (mode
== VOIDmode
)
1501 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1502 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1503 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1505 max_size
= GET_MODE_SIZE (mode
);
1508 /* The code above should have handled everything. */
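
/* Illustrative sketch (not part of GCC): the mode-selection idiom used by
   move_by_pieces and move_by_pieces_ninsns above.  Starting from the
   narrowest MODE_INT mode, keep the widest integer mode strictly smaller
   than MAX_SIZE bytes whose move pattern exists and whose alignment
   requirement is satisfied.  The helper name is invented; the walk over
   GET_CLASS_NARROWEST_MODE / GET_MODE_WIDER_MODE is the real technique.  */
#if 0
static enum machine_mode
example_widest_usable_mode (max_size, align)
     unsigned int max_size;	/* in bytes */
     unsigned int align;	/* in bits */
{
  enum machine_mode mode = VOIDmode, tmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < max_size
	&& mov_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing
	&& align >= GET_MODE_ALIGNMENT (tmode))
      mode = tmode;

  return mode;
}
#endif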
1513 /* Return number of insns required to move L bytes by pieces.
1514 ALIGN (in bytes) is maximum alignment we can assume. */
1516 static unsigned HOST_WIDE_INT
1517 move_by_pieces_ninsns (l
, align
)
1518 unsigned HOST_WIDE_INT l
;
1521 unsigned HOST_WIDE_INT n_insns
= 0;
1522 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1524 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1525 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1526 align
= MOVE_MAX
* BITS_PER_UNIT
;
1528 while (max_size
> 1)
1530 enum machine_mode mode
= VOIDmode
, tmode
;
1531 enum insn_code icode
;
1533 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1534 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1535 if (GET_MODE_SIZE (tmode
) < max_size
)
1538 if (mode
== VOIDmode
)
1541 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1542 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1543 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1545 max_size
= GET_MODE_SIZE (mode
);
1553 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1554 with move instructions for mode MODE. GENFUN is the gen_... function
1555 to make a move insn for that mode. DATA has all the other info. */
1558 move_by_pieces_1 (genfun
, mode
, data
)
1559 rtx (*genfun
) PARAMS ((rtx
, ...));
1560 enum machine_mode mode
;
1561 struct move_by_pieces
*data
;
1563 unsigned int size
= GET_MODE_SIZE (mode
);
1566 while (data
->len
>= size
)
1569 data
->offset
-= size
;
1573 if (data
->autinc_to
)
1575 to1
= gen_rtx_MEM (mode
, data
->to_addr
);
1576 MEM_COPY_ATTRIBUTES (to1
, data
->to
);
1579 to1
= change_address (data
->to
, mode
,
1580 plus_constant (data
->to_addr
, data
->offset
));
1583 if (data
->autinc_from
)
1585 from1
= gen_rtx_MEM (mode
, data
->from_addr
);
1586 MEM_COPY_ATTRIBUTES (from1
, data
->from
);
1589 from1
= change_address (data
->from
, mode
,
1590 plus_constant (data
->from_addr
, data
->offset
));
1592 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1593 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1594 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1595 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1598 emit_insn ((*genfun
) (to1
, from1
));
1600 emit_single_push_insn (mode
, from1
, NULL
);
1602 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1603 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1604 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1605 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1607 if (! data
->reverse
)
1608 data
->offset
+= size
;
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
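
/* Illustrative sketch (not part of GCC): copying a BLKmode aggregate from
   one MEM to another with emit_block_move.  The names and the 32-byte
   constant are invented; the alignment argument is in bits, as described
   above.  */
#if 0
static void
example_copy_aggregate (dst_mem, src_mem)
     rtx dst_mem, src_mem;	/* BLKmode MEMs */
{
  /* Copy 32 bytes, assuming both blocks are at least word-aligned.  */
  emit_block_move (dst_mem, src_mem, GEN_INT (32), BITS_PER_WORD);
}
#endif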
1627 emit_block_move (x
, y
, size
, align
)
1633 #ifdef TARGET_MEM_FUNCTIONS
1635 tree call_expr
, arg_list
;
1638 if (GET_MODE (x
) != BLKmode
)
1641 if (GET_MODE (y
) != BLKmode
)
1644 x
= protect_from_queue (x
, 1);
1645 y
= protect_from_queue (y
, 0);
1646 size
= protect_from_queue (size
, 0);
1648 if (GET_CODE (x
) != MEM
)
1650 if (GET_CODE (y
) != MEM
)
1655 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1656 move_by_pieces (x
, y
, INTVAL (size
), align
);
1659 /* Try the most limited insn first, because there's no point
1660 including more than one in the machine description unless
1661 the more limited one has some advantage. */
1663 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1664 enum machine_mode mode
;
1666 /* Since this is a move insn, we don't care about volatility. */
1669 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1670 mode
= GET_MODE_WIDER_MODE (mode
))
1672 enum insn_code code
= movstr_optab
[(int) mode
];
1673 insn_operand_predicate_fn pred
;
1675 if (code
!= CODE_FOR_nothing
1676 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1677 here because if SIZE is less than the mode mask, as it is
1678 returned by the macro, it will definitely be less than the
1679 actual mode mask. */
1680 && ((GET_CODE (size
) == CONST_INT
1681 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1682 <= (GET_MODE_MASK (mode
) >> 1)))
1683 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1684 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1685 || (*pred
) (x
, BLKmode
))
1686 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1687 || (*pred
) (y
, BLKmode
))
1688 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1689 || (*pred
) (opalign
, VOIDmode
)))
1692 rtx last
= get_last_insn ();
1695 op2
= convert_to_mode (mode
, size
, 1);
1696 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1697 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1698 op2
= copy_to_mode_reg (mode
, op2
);
1700 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1708 delete_insns_since (last
);
1714 /* X, Y, or SIZE may have been passed through protect_from_queue.
1716 It is unsafe to save the value generated by protect_from_queue
1717 and reuse it later. Consider what happens if emit_queue is
1718 called before the return value from protect_from_queue is used.
1720 Expansion of the CALL_EXPR below will call emit_queue before
1721 we are finished emitting RTL for argument setup. So if we are
1722 not careful we could get the wrong value for an argument.
1724 To avoid this problem we go ahead and emit code to copy X, Y &
1725 SIZE into new pseudos. We can then place those new pseudos
1726 into an RTL_EXPR and use them later, even after a call to
1729 Note this is not strictly needed for library calls since they
1730 do not call emit_queue before loading their arguments. However,
1731 we may need to have library calls call emit_queue in the future
1732 since failing to do so could cause problems for targets which
1733 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1734 x
= copy_to_mode_reg (Pmode
, XEXP (x
, 0));
1735 y
= copy_to_mode_reg (Pmode
, XEXP (y
, 0));
1737 #ifdef TARGET_MEM_FUNCTIONS
1738 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
1740 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1741 TREE_UNSIGNED (integer_type_node
));
1742 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
1745 #ifdef TARGET_MEM_FUNCTIONS
1746 /* It is incorrect to use the libcall calling conventions to call
1747 memcpy in this context.
1749 This could be a user call to memcpy and the user may wish to
1750 examine the return value from memcpy.
1752 For targets where libcalls and normal calls have different conventions
1753 for returning pointers, we could end up generating incorrect code.
1755 So instead of using a libcall sequence we build up a suitable
1756 CALL_EXPR and expand the call in the normal fashion. */
1757 if (fn
== NULL_TREE
)
1761 /* This was copied from except.c, I don't know if all this is
1762 necessary in this context or not. */
1763 fn
= get_identifier ("memcpy");
1764 fntype
= build_pointer_type (void_type_node
);
1765 fntype
= build_function_type (fntype
, NULL_TREE
);
1766 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
1767 ggc_add_tree_root (&fn
, 1);
1768 DECL_EXTERNAL (fn
) = 1;
1769 TREE_PUBLIC (fn
) = 1;
1770 DECL_ARTIFICIAL (fn
) = 1;
1771 make_decl_rtl (fn
, NULL_PTR
);
1772 assemble_external (fn
);
1775 /* We need to make an argument list for the function call.
1777 memcpy has three arguments, the first two are void * addresses and
1778 the last is a size_t byte count for the copy. */
1780 = build_tree_list (NULL_TREE
,
1781 make_tree (build_pointer_type (void_type_node
), x
));
1782 TREE_CHAIN (arg_list
)
1783 = build_tree_list (NULL_TREE
,
1784 make_tree (build_pointer_type (void_type_node
), y
));
1785 TREE_CHAIN (TREE_CHAIN (arg_list
))
1786 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
1787 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
1789 /* Now we have to build up the CALL_EXPR itself. */
1790 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1791 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1792 call_expr
, arg_list
, NULL_TREE
);
1793 TREE_SIDE_EFFECTS (call_expr
) = 1;
1795 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1797 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
1798 VOIDmode
, 3, y
, Pmode
, x
, Pmode
,
1799 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1800 TREE_UNSIGNED (integer_type_node
)),
1801 TYPE_MODE (integer_type_node
));
1808 /* Copy all or part of a value X into registers starting at REGNO.
1809 The number of registers to be filled is NREGS. */
1812 move_block_to_reg (regno
, x
, nregs
, mode
)
1816 enum machine_mode mode
;
1819 #ifdef HAVE_load_multiple
1827 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1828 x
= validize_mem (force_const_mem (mode
, x
));
1830 /* See if the machine can do this with a load multiple insn. */
1831 #ifdef HAVE_load_multiple
1832 if (HAVE_load_multiple
)
1834 last
= get_last_insn ();
1835 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1843 delete_insns_since (last
);
1847 for (i
= 0; i
< nregs
; i
++)
1848 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1849 operand_subword_force (x
, i
, mode
));
1852 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1853 The number of registers to be filled is NREGS. SIZE indicates the number
1854 of bytes in the object X. */
1857 move_block_from_reg (regno
, x
, nregs
, size
)
1864 #ifdef HAVE_store_multiple
1868 enum machine_mode mode
;
1873 /* If SIZE is that of a mode no bigger than a word, just use that
1874 mode's store operation. */
1875 if (size
<= UNITS_PER_WORD
1876 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
1878 emit_move_insn (change_address (x
, mode
, NULL
),
1879 gen_rtx_REG (mode
, regno
));
1883 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884 to the left before storing to memory. Note that the previous test
1885 doesn't handle all cases (e.g. SIZE == 3). */
1886 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1888 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1894 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1895 gen_rtx_REG (word_mode
, regno
),
1896 build_int_2 ((UNITS_PER_WORD
- size
)
1897 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1898 emit_move_insn (tem
, shift
);
1902 /* See if the machine can do this with a store multiple insn. */
1903 #ifdef HAVE_store_multiple
1904 if (HAVE_store_multiple
)
1906 last
= get_last_insn ();
1907 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1915 delete_insns_since (last
);
1919 for (i
= 0; i
< nregs
; i
++)
1921 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1926 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   DST.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   will be needed.  */
1942 emit_group_load (dst
, orig_src
, ssize
, align
)
1950 if (GET_CODE (dst
) != PARALLEL
)
1953 /* Check for a NULL entry, used to indicate that the parameter goes
1954 both on the stack and in registers. */
1955 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1960 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1962 /* If we won't be loading directly from memory, protect the real source
1963 from strange tricks we might play. */
1965 if (GET_CODE (src
) != MEM
&& ! CONSTANT_P (src
))
1967 if (GET_MODE (src
) == VOIDmode
)
1968 src
= gen_reg_rtx (GET_MODE (dst
));
1970 src
= gen_reg_rtx (GET_MODE (orig_src
));
1971 emit_move_insn (src
, orig_src
);
1974 /* Process the pieces. */
1975 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1977 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1978 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1979 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1982 /* Handle trailing fragments that run over the size of the struct. */
1983 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1985 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1986 bytelen
= ssize
- bytepos
;
1991 /* Optimize the access just a bit. */
1992 if (GET_CODE (src
) == MEM
1993 && align
>= GET_MODE_ALIGNMENT (mode
)
1994 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1995 && bytelen
== GET_MODE_SIZE (mode
))
1997 tmps
[i
] = gen_reg_rtx (mode
);
1998 emit_move_insn (tmps
[i
],
1999 change_address (src
, mode
,
2000 plus_constant (XEXP (src
, 0),
2003 else if (GET_CODE (src
) == CONCAT
)
2006 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0))))
2007 tmps
[i
] = XEXP (src
, 0);
2008 else if (bytepos
== (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
2009 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 1))))
2010 tmps
[i
] = XEXP (src
, 1);
2014 else if ((CONSTANT_P (src
)
2015 && (GET_MODE (src
) == VOIDmode
|| GET_MODE (src
) == mode
))
2016 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
2019 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2020 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2021 mode
, mode
, align
, ssize
);
2023 if (BYTES_BIG_ENDIAN
&& shift
)
2024 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2025 tmps
[i
], 0, OPTAB_WIDEN
);
2030 /* Copy the extracted pieces into the proper (probable) hard regs. */
2031 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2032 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
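
/* Illustrative sketch (not part of GCC): how a caller describes a value
   split across two registers with a PARALLEL and then fills it from a
   BLKmode MEM using emit_group_load above.  The register numbers, the
   16-byte size and the helper name are invented for the example; each
   PARALLEL element is an EXPR_LIST of (target register, byte offset).  */
#if 0
static void
example_load_into_reg_pair (src_mem, regno)
     rtx src_mem;		/* BLKmode MEM, 16 bytes */
     unsigned int regno;
{
  rtx dst
    = gen_rtx_PARALLEL (VOIDmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, regno),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, regno + 1),
						      GEN_INT (8))));

  emit_group_load (dst, src_mem, 16, BITS_PER_WORD);
}
#endif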
2035 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2036 registers represented by a PARALLEL. SSIZE represents the total size of
2037 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2040 emit_group_store (orig_dst
, src
, ssize
, align
)
2048 if (GET_CODE (src
) != PARALLEL
)
2051 /* Check for a NULL entry, used to indicate that the parameter goes
2052 both on the stack and in registers. */
2053 if (XEXP (XVECEXP (src
, 0, 0), 0))
2058 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2060 /* Copy the (probable) hard regs into pseudos. */
2061 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2063 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2064 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2065 emit_move_insn (tmps
[i
], reg
);
2069 /* If we won't be storing directly into memory, protect the real destination
2070 from strange tricks we might play. */
2072 if (GET_CODE (dst
) == PARALLEL
)
2076 /* We can get a PARALLEL dst if there is a conditional expression in
2077 a return statement. In that case, the dst and src are the same,
2078 so no action is necessary. */
2079 if (rtx_equal_p (dst
, src
))
2082 /* It is unclear if we can ever reach here, but we may as well handle
2083 it. Allocate a temporary, and split this into a store/load to/from
2086 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2087 emit_group_store (temp
, src
, ssize
, align
);
2088 emit_group_load (dst
, temp
, ssize
, align
);
2091 else if (GET_CODE (dst
) != MEM
)
2093 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2094 /* Make life a bit easier for combine. */
2095 emit_move_insn (dst
, const0_rtx
);
2098 /* Process the pieces. */
2099 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2101 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2102 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2103 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2105 /* Handle trailing fragments that run over the size of the struct. */
2106 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2108 if (BYTES_BIG_ENDIAN
)
2110 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2111 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2112 tmps
[i
], 0, OPTAB_WIDEN
);
2114 bytelen
= ssize
- bytepos
;
2117 /* Optimize the access just a bit. */
2118 if (GET_CODE (dst
) == MEM
2119 && align
>= GET_MODE_ALIGNMENT (mode
)
2120 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2121 && bytelen
== GET_MODE_SIZE (mode
))
2122 emit_move_insn (change_address (dst
, mode
,
2123 plus_constant (XEXP (dst
, 0),
2127 store_bit_field (dst
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2128 mode
, tmps
[i
], align
, ssize
);
2133 /* Copy from the pseudo into the (probable) hard reg. */
2134 if (GET_CODE (dst
) == REG
)
2135 emit_move_insn (orig_dst
, dst
);
2138 /* Generate code to copy a BLKmode object of TYPE out of a
2139 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2140 is null, a stack temporary is created. TGTBLK is returned.
2142 The primary purpose of this routine is to handle functions
2143 that return BLKmode structures in registers. Some machines
2144 (the PA for example) want to return all small structures
2145 in registers regardless of the structure's alignment. */
2148 copy_blkmode_from_reg (tgtblk
, srcreg
, type
)
2153 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2154 rtx src
= NULL
, dst
= NULL
;
2155 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2156 unsigned HOST_WIDE_INT bitpos
, xbitpos
, big_endian_correction
= 0;
2160 tgtblk
= assign_temp (build_qualified_type (type
,
2162 | TYPE_QUAL_CONST
)),
2164 preserve_temp_slots (tgtblk
);
2167 /* This code assumes srcreg is at least a full word. If it isn't,
2168 copy it into a new pseudo which is a full word. */
2169 if (GET_MODE (srcreg
) != BLKmode
2170 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2171 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2173 /* Structures whose size is not a multiple of a word are aligned
2174 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2175 machine, this means we must skip the empty high order bytes when
2176 calculating the bit offset. */
2177 if (BYTES_BIG_ENDIAN
&& bytes
% UNITS_PER_WORD
)
2178 big_endian_correction
2179 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
2186 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2187 bitpos
< bytes
* BITS_PER_UNIT
;
2188 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2190 /* We need a new source operand each time xbitpos is on a
2191 word boundary and when xbitpos == big_endian_correction
2192 (the first time through). */
2193 if (xbitpos
% BITS_PER_WORD
== 0
2194 || xbitpos
== big_endian_correction
)
2195 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
, BLKmode
);
      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
2204 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2205 extract_bit_field (src
, bitsize
,
2206 xbitpos
% BITS_PER_WORD
, 1,
2207 NULL_RTX
, word_mode
, word_mode
,
2208 bitsize
, BITS_PER_WORD
),
2209 bitsize
, BITS_PER_WORD
);
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
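/* Illustrative sketch, not part of the original file: collecting register
   uses for a call.  CALL_FUSAGE accumulates USE expressions that are later
   attached to the CALL_INSN; the register numbers are hypothetical.  */
#if 0
{
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, gen_rtx_REG (word_mode, 0));  /* one hard register */
  use_regs (&call_fusage, 2, 3);                       /* hard regs 2, 3, 4 */
}
#endif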
int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = MOVE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
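/* Illustrative sketch, not part of the original file: a caller (for example
   a memset/strcpy builtin expander) would first query can_store_by_pieces
   and only then commit to store_by_pieces, since the latter aborts when the
   operation is not profitable.  CONSTFUN and CONSTFUNDATA are hypothetical.  */
#if 0
if (can_store_by_pieces (len, constfun, constfundata, align))
  store_by_pieces (dest_mem, len, constfun, constfundata, align);
#endif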
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL_PTR;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
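/* Illustrative sketch, not part of the original file: clear_by_pieces is the
   constant-length fast path used by clear_storage below.  A direct caller
   must have run the MEM through protect_from_queue first; the 16-byte length
   and word alignment here are hypothetical.  */
#if 0
clear_by_pieces (protect_from_queue (object, 1), 16,
                 GET_MODE_ALIGNMENT (word_mode));
#endif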
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}

/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        {
          to1 = gen_rtx_MEM (mode, data->to_addr);
          MEM_COPY_ATTRIBUTES (to1, data->to);
        }
      else
        to1 = change_address (data->to, mode,
                              plus_constant (data->to_addr, data->offset));

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
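/* Illustrative sketch, not part of the original file: the CONSTFUN callback
   protocol driven by store_by_pieces_1/2.  The callback receives the
   caller's CONSTFUNDATA pointer, a byte OFFSET and a MODE and must return a
   legitimate constant of that mode.  This hypothetical version replicates a
   single fill byte, roughly what a memset expander would do.  */
#if 0
static rtx
fill_byte_constfun (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  unsigned char c = *(unsigned char *) data;   /* DATA points to the byte.  */
  HOST_WIDE_INT val = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | c;
  return GEN_INT (trunc_int_for_mode (val, mode));
}
#endif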
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes and ALIGN is the maximum alignment we can assume.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     unsigned int align;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && MOVE_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else
        {
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */

          rtx opalign = GEN_INT (align / BITS_PER_UNIT);
          enum machine_mode mode;

          for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
               mode = GET_MODE_WIDER_MODE (mode))
            {
              enum insn_code code = clrstr_optab[(int) mode];
              insn_operand_predicate_fn pred;

              if (code != CODE_FOR_nothing
                  /* We don't need MODE to be narrower than
                     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
                     the mode mask, as it is returned by the macro, it will
                     definitely be less than the actual mode mask.  */
                  && ((GET_CODE (size) == CONST_INT
                       && ((unsigned HOST_WIDE_INT) INTVAL (size)
                           <= (GET_MODE_MASK (mode) >> 1)))
                      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                      || (*pred) (object, BLKmode))
                  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
                      || (*pred) (opalign, VOIDmode)))
                {
                  rtx op1;
                  rtx last = get_last_insn ();
                  rtx pat;

                  op1 = convert_to_mode (mode, size, 1);
                  pred = insn_data[(int) code].operand[1].predicate;
                  if (pred != 0 && ! (*pred) (op1, mode))
                    op1 = copy_to_mode_reg (mode, op1);

                  pat = GEN_FCN ((int) code) (object, op1, opalign);
                  if (pat)
                    {
                      emit_insn (pat);
                      return 0;
                    }
                  else
                    delete_insns_since (last);
                }
            }

          /* OBJECT or SIZE may have been passed through protect_from_queue.

             It is unsafe to save the value generated by protect_from_queue
             and reuse it later.  Consider what happens if emit_queue is
             called before the return value from protect_from_queue is used.

             Expansion of the CALL_EXPR below will call emit_queue before
             we are finished emitting RTL for argument setup.  So if we are
             not careful we could get the wrong value for an argument.

             To avoid this problem we go ahead and emit code to copy OBJECT
             and SIZE into new pseudos.  We can then place those new pseudos
             into an RTL_EXPR and use them later, even after a call to
             emit_queue.

             Note this is not strictly needed for library calls since they
             do not call emit_queue before loading their arguments.  However,
             we may need to have library calls call emit_queue in the future
             since failing to do so could cause problems for targets which
             define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
          object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
          size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
          size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                                  TREE_UNSIGNED (integer_type_node));
          size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
          /* It is incorrect to use the libcall calling conventions to call
             memset in this context.

             This could be a user call to memset and the user may wish to
             examine the return value from memset.

             For targets where libcalls and normal calls have different
             conventions for returning pointers, we could end up generating
             incorrect code.

             So instead of using a libcall sequence we build up a suitable
             CALL_EXPR and expand the call in the normal fashion.  */
          if (fn == NULL_TREE)
            {
              tree fntype;

              /* This was copied from except.c, I don't know if all this is
                 necessary in this context or not.  */
              fn = get_identifier ("memset");
              fntype = build_pointer_type (void_type_node);
              fntype = build_function_type (fntype, NULL_TREE);
              fn = build_decl (FUNCTION_DECL, fn, fntype);
              ggc_add_tree_root (&fn, 1);
              DECL_EXTERNAL (fn) = 1;
              TREE_PUBLIC (fn) = 1;
              DECL_ARTIFICIAL (fn) = 1;
              make_decl_rtl (fn, NULL_PTR);
              assemble_external (fn);
            }

          /* We need to make an argument list for the function call.

             memset has three arguments, the first is a void * address, the
             second an integer with the initialization value, and the last a
             size_t byte count for the copy.  */
          arg_list
            = build_tree_list (NULL_TREE,
                               make_tree (build_pointer_type (void_type_node),
                                          object));
          TREE_CHAIN (arg_list)
            = build_tree_list (NULL_TREE,
                               make_tree (integer_type_node, const0_rtx));
          TREE_CHAIN (TREE_CHAIN (arg_list))
            = build_tree_list (NULL_TREE, make_tree (sizetype, size));
          TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

          /* Now we have to build up the CALL_EXPR itself.  */
          call_expr = build1 (ADDR_EXPR,
                              build_pointer_type (TREE_TYPE (fn)), fn);
          call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                             call_expr, arg_list, NULL_TREE);
          TREE_SIDE_EFFECTS (call_expr) = 1;

          retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
          emit_library_call (bzero_libfunc, LCT_NORMAL,
                             VOIDmode, 2, object, Pmode, size,
                             TYPE_MODE (integer_type_node));
#endif
        }
    }

  return retval;
}
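/* Illustrative sketch, not part of the original file: zeroing a BLKmode
   temporary with clear_storage.  The 32-byte size and the use of
   assign_stack_temp are hypothetical; clear_storage itself picks between a
   plain move, clear_by_pieces and the clrstr/memset/bzero fallbacks above.  */
#if 0
{
  rtx blk = assign_stack_temp (BLKmode, 32, 0);

  clear_storage (blk, GEN_INT (32), BITS_PER_WORD);
}
#endif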
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    {
      y_cst = y;
      y = force_const_mem (mode, y);
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    REG_NOTES (last_insn)
      = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));

  return last_insn;
}
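/* Illustrative sketch, not part of the original file: emit_move_insn is the
   usual way to copy one rtx into another of the same mode, for instance
   loading an immediate into a fresh pseudo (the mode and constant are
   arbitrary).  */
#if 0
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
}
#endif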
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  unsigned int i;

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
                                                    * BITS_PER_UNIT),
                                                   (class == MODE_COMPLEX_INT
                                                    ? MODE_INT : MODE_FLOAT),
                                                   0))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
      if (stack
          && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
        {
          rtx temp;
          int offset1, offset2;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx,
                               0,
                               OPTAB_LIB_WIDEN);
          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);
#ifdef STACK_GROWS_DOWNWARD
          offset1 = 0;
          offset2 = GET_MODE_SIZE (submode);
#else
          offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
          offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
                     + GET_MODE_SIZE (submode));
#endif
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset1))),
                          gen_realpart (submode, y));
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset2))),
                          gen_imagpart (submode, y));
        }
      else
#endif
      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          rtx realpart_x, realpart_y;
          rtx imagpart_x, imagpart_y;

          /* If this is a complex value with each part being smaller than a
             word, the usual calling sequence will likely pack the pieces into
             a single register.  Unfortunately, SUBREG of hard registers only
             deals in terms of words, so we have a problem converting input
             arguments to the CONCAT of two registers that is used elsewhere
             for complex values.  If this is before reload, we can copy it into
             memory and reload.  FIXME, we should see about using extract and
             insert on integer registers, but complex short and complex char
             variables should be rarely used.  */
          if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
              && (reload_in_progress | reload_completed) == 0)
            {
              int packed_dest_p
                = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
              int packed_src_p
                = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

              if (packed_dest_p || packed_src_p)
                {
                  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                               ? MODE_FLOAT : MODE_INT);

                  enum machine_mode reg_mode
                    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

                  if (reg_mode != BLKmode)
                    {
                      rtx mem = assign_stack_temp (reg_mode,
                                                   GET_MODE_SIZE (mode), 0);
                      rtx cmem = change_address (mem, mode, NULL_RTX);

                      current_function_cannot_inline
                        = N_("function using short complex types cannot be inline");

                      if (packed_dest_p)
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
                          emit_move_insn_1 (cmem, y);
                          return emit_move_insn_1 (sreg, mem);
                        }
                      else
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
                          emit_move_insn_1 (mem, sreg);
                          return emit_move_insn_1 (x, cmem);
                        }
                    }
                }
            }

          realpart_x = gen_realpart (submode, x);
          realpart_y = gen_realpart (submode, y);
          imagpart_x = gen_imagpart (submode, x);
          imagpart_y = gen_imagpart (submode, y);

          /* Show the output dies here.  This is necessary for SUBREGs
             of pseudos since we cannot track their lifetimes correctly;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed)
              && (GET_CODE (realpart_x) == SUBREG
                  || GET_CODE (imagpart_x) == SUBREG))
            {
              emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
            }

          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (realpart_x, realpart_y));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (imagpart_x, imagpart_y));
        }

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          x = change_address (x, VOIDmode, stack_pointer_rtx);
        }
#endif

      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        {
          rtx new = gen_rtx_MEM (GET_MODE (x), inner);

          MEM_COPY_ATTRIBUTES (new, x);
          x = new;
        }
      if (reload_in_progress && GET_CODE (y) == MEM
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        {
          rtx new = gen_rtx_MEM (GET_MODE (y), inner);

          MEM_COPY_ATTRIBUTES (new, y);
          y = new;
        }

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = gen_sequence ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        {
          emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
        }

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
#ifdef ARGS_GROW_DOWNWARD
  if (!ACCUMULATE_OUTGOING_ARGS)
#else
  if (0)
#endif
#else
  if (1)
#endif
    {
      /* Return the lowest stack address when STACK or ARGS grow downward and
         we are not accumulating outgoing arguments (the c4x port uses such
         conventions).  */
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
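/* Illustrative sketch, not part of the original file: reserving stack space
   for a block-sized argument.  SIZE_RTX is a hypothetical size in ptr_mode;
   push_block adjusts the stack and returns an address we can store through.  */
#if 0
{
  rtx blk_addr = push_block (size_rtx, 0, 0);
  rtx blk = gen_rtx_MEM (BLKmode, blk_addr);
}
#endif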
/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
#ifdef PUSH_ROUNDING
  rtx dest_addr;
  int rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;

  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);
      /* Function incoming arguments may overlap with sibling call
         outgoing arguments and we cannot allow reordering of reads
         from function arguments with stores to outgoing arguments
         of sibling calls.  */
      MEM_ALIAS_SET (dest) = 0;
    }
  emit_move_insn (dest, x);
#else
  abort ();
#endif
}
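/* Illustrative sketch, not part of the original file: on a PUSH_ROUNDING
   target a single word-sized argument can be pushed directly.  X is a
   hypothetical word_mode value; NULL_TREE means no type information.  */
#if 0
emit_single_push_insn (word_mode, x, NULL_TREE);
#endif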
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space,
                alignment_pad)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align);

          if (current_function_check_memory_usage && ! in_check_memory_usage)
            {
              rtx temp;

              in_check_memory_usage = 1;
              temp = get_push_address (INTVAL (size) - used);
              if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
                emit_library_call (chkr_copy_bitmap_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
                                   Pmode, XEXP (xinner, 0), Pmode,
                                   GEN_INT (INTVAL (size) - used),
                                   TYPE_MODE (sizetype));
              else
                emit_library_call (chkr_set_right_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
                                   Pmode, GEN_INT (INTVAL (size) - used),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              in_check_memory_usage = 0;
            }
        }
      else
#endif /* PUSH_ROUNDING */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));
          if (current_function_check_memory_usage && ! in_check_memory_usage)
            {
              in_check_memory_usage = 1;
              target = copy_to_reg (temp);
              if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
                emit_library_call (chkr_copy_bitmap_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                                   target, Pmode,
                                   XEXP (xinner, 0), Pmode,
                                   size, TYPE_MODE (sizetype));
              else
                emit_library_call (chkr_set_right_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                                   target, Pmode,
                                   size, TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              in_check_memory_usage = 0;
            }

          target = gen_rtx_MEM (BLKmode, temp);

          if (type != 0)
            {
              set_mem_attributes (target, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              MEM_ALIAS_SET (target) = 0;
            }

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
            {
              move_by_pieces (target, xinner, INTVAL (size), align);
              goto ret;
            }
          else
            {
              rtx opalign = GEN_INT (align / BITS_PER_UNIT);
              enum machine_mode mode;

              for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                {
                  enum insn_code code = movstr_optab[(int) mode];
                  insn_operand_predicate_fn pred;

                  if (code != CODE_FOR_nothing
                      && ((GET_CODE (size) == CONST_INT
                           && ((unsigned HOST_WIDE_INT) INTVAL (size)
                               <= (GET_MODE_MASK (mode) >> 1)))
                          || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                      && (!(pred = insn_data[(int) code].operand[0].predicate)
                          || ((*pred) (target, BLKmode)))
                      && (!(pred = insn_data[(int) code].operand[1].predicate)
                          || ((*pred) (xinner, BLKmode)))
                      && (!(pred = insn_data[(int) code].operand[3].predicate)
                          || ((*pred) (opalign, VOIDmode))))
                    {
                      rtx op2 = convert_to_mode (mode, size, 1);
                      rtx last = get_last_insn ();
                      rtx pat;

                      pred = insn_data[(int) code].operand[2].predicate;
                      if (pred != 0 && ! (*pred) (op2, mode))
                        op2 = copy_to_mode_reg (mode, op2);

                      pat = GEN_FCN ((int) code) (target, xinner,
                                                  op2, opalign);
                      if (pat)
                        {
                          emit_insn (pat);
                          goto ret;
                        }
                      else
                        delete_insns_since (last);
                    }
                }
            }

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, LCT_NORMAL,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             convert_to_mode (TYPE_MODE (sizetype),
                                              size, TREE_UNSIGNED (sizetype)),
                             TYPE_MODE (sizetype));
#else
          emit_library_call (bcopy_libfunc, LCT_NORMAL,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             convert_to_mode (TYPE_MODE (integer_type_node),
                                              size,
                                              TREE_UNSIGNED (integer_type_node)),
                             TYPE_MODE (integer_type_node));
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                        args_so_far));

          dest = gen_rtx_MEM (mode, addr);
          if (type != 0)
            {
              set_mem_attributes (dest, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              MEM_ALIAS_SET (dest) = 0;
            }

          emit_move_insn (dest, x);
        }

      if (current_function_check_memory_usage && ! in_check_memory_usage)
        {
          in_check_memory_usage = 1;
          if (target == 0)
            target = get_push_address (GET_MODE_SIZE (mode));

          if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
            emit_library_call (chkr_copy_bitmap_libfunc,
                               LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
                               Pmode, XEXP (x, 0), Pmode,
                               GEN_INT (GET_MODE_SIZE (mode)),
                               TYPE_MODE (sizetype));
          else
            emit_library_call (chkr_set_right_libfunc,
                               LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
                               Pmode, GEN_INT (GET_MODE_SIZE (mode)),
                               TYPE_MODE (sizetype),
                               GEN_INT (MEMORY_USE_RW),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, -1, align);  /* ??? size? */
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
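/* Illustrative sketch, not part of the original file: a minimal argument
   push with no partial registers, no preallocated argument block and no
   extra padding.  ARG_RTX, ARG_TYPE and ARGS_SO_FAR are hypothetical.  */
#if 0
emit_push_insn (arg_rtx, TYPE_MODE (arg_type), arg_type, NULL_RTX,
                TYPE_ALIGN (arg_type), 0, NULL_RTX, 0,
                NULL_RTX, args_so_far, 0, NULL_RTX);
#endif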
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      unsigned int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

          if (GET_CODE (to_rtx) != MEM)
            abort ();

          if (GET_MODE (offset_rtx) != ptr_mode)
            {
#ifdef POINTERS_EXTEND_UNSIGNED
              offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
            }

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (GET_CODE (to_rtx) == MEM
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && alignment == GET_MODE_ALIGNMENT (mode1))
            {
              rtx temp = change_address (to_rtx, mode1,
                                         plus_constant (XEXP (to_rtx, 0),
                                                        (bitpos /
                                                         BITS_PER_UNIT)));
              if (GET_CODE (XEXP (temp, 0)) == REG)
                to_rtx = temp;
              else
                to_rtx = change_address (to_rtx, mode1,
                                         force_reg (GET_MODE (XEXP (temp, 0)),
                                                    XEXP (temp, 0)));
              bitpos = 0;
            }

          to_rtx = change_address (to_rtx, VOIDmode,
                                   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
                                                 force_reg (ptr_mode,
                                                            offset_rtx)));
        }

      if (volatilep)
        {
          if (GET_CODE (to_rtx) == MEM)
            {
              /* When the offset is zero, to_rtx is the address of the
                 structure we are storing into, and hence may be shared.
                 We must make a new MEM before setting the volatile bit.  */
              if (offset == 0)
                to_rtx = copy_rtx (to_rtx);

              MEM_VOLATILE_P (to_rtx) = 1;
            }
#if 0  /* This was turned off because, when a field is volatile
          in an object which is not volatile, the object may be in a register,
          and then we would abort over here.  */
          else
            abort ();
#endif
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1)))
        {
          if (offset == 0)
            to_rtx = copy_rtx (to_rtx);

          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      /* Check the access.  */
      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
        {
          rtx to_addr;
          int size;
          int best_mode_size;
          enum machine_mode best_mode;

          best_mode = get_best_mode (bitsize, bitpos,
                                     TYPE_ALIGN (TREE_TYPE (tem)),
                                     mode1, volatilep);
          if (best_mode == VOIDmode)
            best_mode = QImode;

          best_mode_size = GET_MODE_BITSIZE (best_mode);
          to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
          size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
          size *= GET_MODE_SIZE (best_mode);

          /* Check the access right of the pointer.  */
          in_check_memory_usage = 1;
          if (size)
            emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
                               VOIDmode, 3, to_addr, Pmode,
                               GEN_INT (size), TYPE_MODE (sizetype),
                               GEN_INT (MEMORY_USE_WO),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }

      /* If this is a varying-length object, we must get the address of
         the source and do an explicit block move.  */
      if (bitsize < 0)
        {
          unsigned int from_align;
          rtx from_rtx = expand_expr_unaligned (from, &from_align);
          rtx inner_to_rtx
            = change_address (to_rtx, VOIDmode,
                              plus_constant (XEXP (to_rtx, 0),
                                             bitpos / BITS_PER_UNIT));

          emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
                           MIN (alignment, from_align));
          free_temp_slots ();
          pop_temp_slots ();
          return to_rtx;
        }
      else
        {
          result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                                (want_value
                                 /* Spurious cast for HPUX compiler.  */
                                 ? ((enum machine_mode)
                                    TYPE_MODE (TREE_TYPE (to)))
                                 : VOIDmode),
                                unsignedp, alignment,
                                int_size_in_bytes (TREE_TYPE (tem)),
                                get_alias_set (to));

          preserve_temp_slots (result);
          free_temp_slots ();
          pop_temp_slots ();

          /* If the value is meaningful, convert RESULT to the proper mode.
             Otherwise, return nothing.  */
          return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                              TYPE_MODE (TREE_TYPE (from)),
                                              result,
                                              TREE_UNSIGNED (TREE_TYPE (to)))
                  : NULL_RTX);
        }
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
                         TYPE_ALIGN (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from),
                         TYPE_ALIGN (TREE_TYPE (from)));
      else
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
              || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
            value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    {
      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
      if (GET_CODE (to_rtx) == MEM)
        MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
    }

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
                         TYPE_ALIGN (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
                              EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (current_function_check_memory_usage)
        emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
                           VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                           XEXP (from_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (sizetype),
                                            size, TREE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memmove_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                         XEXP (to_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node),
                                          size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
3969 /* Generate code for computing expression EXP,
3970 and storing the value into TARGET.
3971 TARGET may contain a QUEUED rtx.
3973 If WANT_VALUE is nonzero, return a copy of the value
3974 not in TARGET, so that we can be sure to use the proper
3975 value in a containing expression even if TARGET has something
3976 else stored in it. If possible, we copy the value through a pseudo
3977 and return that pseudo. Or, if the value is constant, we try to
3978 return the constant. In some cases, we return a pseudo
3979 copied *from* TARGET.
3981 If the mode is BLKmode then we may return TARGET itself.
3982 It turns out that in BLKmode it doesn't cause a problem.
3983 because C has no operators that could combine two different
3984 assignments into the same BLKmode object with different values
3985 with no sequence point. Will other languages need this to
3988 If WANT_VALUE is 0, we return NULL, to make sure
3989 to catch quickly any cases where the caller uses the value
3990 and fails to set WANT_VALUE. */
3993 store_expr (exp
, target
, want_value
)
3995 register rtx target
;
3999 int dont_return_target
= 0;
4000 int dont_store_target
= 0;
4002 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4004 /* Perform first part of compound expression, then assign from second
4006 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
4008 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4010 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4012 /* For conditional expression, get safe form of the target. Then
4013 test the condition, doing the appropriate assignment on either
4014 side. This avoids the creation of unnecessary temporaries.
4015 For non-BLKmode, it is more efficient not to do this. */
4017 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4020 target
= protect_from_queue (target
, 1);
4022 do_pending_stack_adjust ();
4024 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4025 start_cleanup_deferral ();
4026 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
4027 end_cleanup_deferral ();
4029 emit_jump_insn (gen_jump (lab2
));
4032 start_cleanup_deferral ();
4033 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
4034 end_cleanup_deferral ();
4039 return want_value
? target
: NULL_RTX
;
4041 else if (queued_subexp_p (target
))
4042 /* If target contains a postincrement, let's not risk
4043 using it as the place to generate the rhs. */
4045 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4047 /* Expand EXP into a new pseudo. */
4048 temp
= gen_reg_rtx (GET_MODE (target
));
4049 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
4052 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
4054 /* If target is volatile, ANSI requires accessing the value
4055 *from* the target, if it is accessed. So make that happen.
4056 In no case return the target itself. */
4057 if (! MEM_VOLATILE_P (target
) && want_value
)
4058 dont_return_target
= 1;
4060 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
4061 && GET_MODE (target
) != BLKmode
)
4062 /* If target is in memory and caller wants value in a register instead,
4063 arrange that. Pass TARGET as target for expand_expr so that,
4064 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4065 We know expand_expr will not use the target in that case.
4066 Don't do this if TARGET is volatile because we are supposed
4067 to write it and then read it. */
4069 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4070 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4072 /* If TEMP is already in the desired TARGET, only copy it from
4073 memory and don't store it there again. */
4075 || (rtx_equal_p (temp
, target
)
4076 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4077 dont_store_target
= 1;
4078 temp
= copy_to_reg (temp
);
4080 dont_return_target
= 1;
4082 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4083 /* If this is an scalar in a register that is stored in a wider mode
4084 than the declared mode, compute the result into its declared mode
4085 and then convert to the wider mode. Our value is the computed
4088 /* If we don't want a value, we can do the conversion inside EXP,
4089 which will often result in some optimizations. Do the conversion
4090 in two steps: first change the signedness, if needed, then
4091 the extend. But don't do this if the type of EXP is a subtype
4092 of something else since then the conversion might involve
4093 more than just converting modes. */
4094 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4095 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4097 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4098 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4101 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
4105 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
4106 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4110 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4112 /* If TEMP is a volatile MEM and we want a result value, make
4113 the access now so it gets done only once. Likewise if
4114 it contains TARGET. */
4115 if (GET_CODE (temp
) == MEM
&& want_value
4116 && (MEM_VOLATILE_P (temp
)
4117 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
4118 temp
= copy_to_reg (temp
);
4120 /* If TEMP is a VOIDmode constant, use convert_modes to make
4121 sure that we properly convert it. */
4122 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4123 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4124 TYPE_MODE (TREE_TYPE (exp
)), temp
,
4125 SUBREG_PROMOTED_UNSIGNED_P (target
));
4127 convert_move (SUBREG_REG (target
), temp
,
4128 SUBREG_PROMOTED_UNSIGNED_P (target
));
4130 /* If we promoted a constant, change the mode back down to match
4131 target. Otherwise, the caller might get confused by a result whose
4132 mode is larger than expected. */
4134 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
)
4135 && GET_MODE (temp
) != VOIDmode
)
4137 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4138 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4139 SUBREG_PROMOTED_UNSIGNED_P (temp
)
4140 = SUBREG_PROMOTED_UNSIGNED_P (target
);
4143 return want_value
? temp
: NULL_RTX
;
4147 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4148 /* Return TARGET if it's a specified hardware register.
4149 If TARGET is a volatile mem ref, either return TARGET
4150 or return a reg copied *from* TARGET; ANSI requires this.
4152 Otherwise, if TEMP is not TARGET, return TEMP
4153 if it is constant (for efficiency),
4154 or if we really want the correct value. */
4155 if (!(target
&& GET_CODE (target
) == REG
4156 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4157 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4158 && ! rtx_equal_p (temp
, target
)
4159 && (CONSTANT_P (temp
) || want_value
))
4160 dont_return_target
= 1;
4163 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4164 the same as that of TARGET, adjust the constant. This is needed, for
4165 example, in case it is a CONST_DOUBLE and we want only a word-sized
4167 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4168 && TREE_CODE (exp
) != ERROR_MARK
4169 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4170 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4171 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
  if (current_function_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      in_check_memory_usage = 1;
      if (GET_CODE (temp) == MEM)
	emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (target, 0), Pmode,
			   XEXP (temp, 0), Pmode,
			   expr_size (exp), TYPE_MODE (sizetype));
      else
	emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (target, 0), Pmode,
			   expr_size (exp), TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));
      in_check_memory_usage = 0;
    }
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */
  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
4239 size
= expr_size (exp
);
4240 if (GET_CODE (size
) == CONST_INT
4241 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4242 emit_block_move (target
, temp
, size
, TYPE_ALIGN (TREE_TYPE (exp
)));
4245 /* Compute the size of the data to copy from the string. */
4247 = size_binop (MIN_EXPR
,
4248 make_tree (sizetype
, size
),
4249 size_int (TREE_STRING_LENGTH (exp
)));
4250 unsigned int align
= TYPE_ALIGN (TREE_TYPE (exp
));
4251 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
4255 /* Copy that much. */
4256 emit_block_move (target
, temp
, copy_size_rtx
,
4257 TYPE_ALIGN (TREE_TYPE (exp
)));
4259 /* Figure out how much is left in TARGET that we have to clear.
4260 Do all calculations in ptr_mode. */
4262 addr
= XEXP (target
, 0);
4263 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
4265 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4267 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
4268 size
= plus_constant (size
, -TREE_STRING_LENGTH (exp
));
4270 (unsigned int) (BITS_PER_UNIT
4271 * (INTVAL (copy_size_rtx
)
4272 & - INTVAL (copy_size_rtx
))));
4276 addr
= force_reg (ptr_mode
, addr
);
4277 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
4278 copy_size_rtx
, NULL_RTX
, 0,
4281 size
= expand_binop (ptr_mode
, sub_optab
, size
,
4282 copy_size_rtx
, NULL_RTX
, 0,
4285 align
= BITS_PER_UNIT
;
4286 label
= gen_label_rtx ();
4287 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4288 GET_MODE (size
), 0, 0, label
);
4290 align
= MIN (align
, expr_align (copy_size
));
4292 if (size
!= const0_rtx
)
4294 rtx dest
= gen_rtx_MEM (BLKmode
, addr
);
4296 MEM_COPY_ATTRIBUTES (dest
, target
);
4298 /* Be sure we can write on ADDR. */
4299 in_check_memory_usage
= 1;
4300 if (current_function_check_memory_usage
)
4301 emit_library_call (chkr_check_addr_libfunc
,
4302 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
4304 size
, TYPE_MODE (sizetype
),
4305 GEN_INT (MEMORY_USE_WO
),
4306 TYPE_MODE (integer_type_node
));
4307 in_check_memory_usage
= 0;
4308 clear_storage (dest
, size
, align
);
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
			 TYPE_ALIGN (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)));
      else
	emit_move_insn (target, temp);
    }
  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
4346 /* Return 1 if EXP just contains zeros. */
4354 switch (TREE_CODE (exp
))
4358 case NON_LVALUE_EXPR
:
4359 return is_zeros_p (TREE_OPERAND (exp
, 0));
4362 return integer_zerop (exp
);
4366 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4369 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
4372 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4373 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
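      /* For any other CONSTRUCTOR, every element listed in the constructor
	 must itself be zero for the whole expression to count as zero.  */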
4374 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4375 if (! is_zeros_p (TREE_VALUE (elt
)))
4385 /* Return 1 if EXP contains mostly (3/4) zeros. */
4388 mostly_zeros_p (exp
)
4391 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4393 int elts
= 0, zeros
= 0;
4394 tree elt
= CONSTRUCTOR_ELTS (exp
);
4395 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4397 /* If there are no ranges of true bits, it is all zero. */
4398 return elt
== NULL_TREE
;
4400 for (; elt
; elt
= TREE_CHAIN (elt
))
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     and since this function is only used for nested array elements,
	     this should be close enough.  */
4407 if (mostly_zeros_p (TREE_VALUE (elt
)))
4412 return 4 * zeros
>= 3 * elts
;
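  /* Anything other than a CONSTRUCTOR is "mostly zero" only if it is
     entirely zero.  */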
4415 return is_zeros_p (exp
);
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   ALIGN and CLEARED are as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
4430 store_constructor_field (target
, bitsize
, bitpos
,
4431 mode
, exp
, type
, align
, cleared
, alias_set
)
4433 unsigned HOST_WIDE_INT bitsize
;
4434 HOST_WIDE_INT bitpos
;
4435 enum machine_mode mode
;
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
4450 = change_address (target
,
4451 GET_MODE (target
) == BLKmode
4453 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4454 ? BLKmode
: VOIDmode
,
4455 plus_constant (XEXP (target
, 0),
4456 bitpos
/ BITS_PER_UNIT
));
4459 /* Show the alignment may no longer be what it was and update the alias
4460 set, if required. */
4462 align
= MIN (align
, (unsigned int) bitpos
& - bitpos
);
4463 if (GET_CODE (target
) == MEM
)
4464 MEM_ALIAS_SET (target
) = alias_set
;
4466 store_constructor (exp
, target
, align
, cleared
, bitsize
/ BITS_PER_UNIT
);
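  /* In all other cases (a non-CONSTRUCTOR value, a bit offset that is not
     byte aligned, or a non-zero bit position with a register target),
     defer to store_field.  */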
4469 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, align
,
4470 int_size_in_bytes (type
), alias_set
);
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   ALIGN is the maximum known alignment for TARGET.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
4482 store_constructor (exp
, target
, align
, cleared
, size
)
4489 tree type
= TREE_TYPE (exp
);
4490 #ifdef WORD_REGISTER_OPERATIONS
4491 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
  /* We know our target cannot conflict, since safe_from_p has been called.  */
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, align, cleared, size);
      emit_move_insn (target, temp);
      return;
    }
4508 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4509 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4513 /* Inform later passes that the whole union value is dead. */
4514 if ((TREE_CODE (type
) == UNION_TYPE
4515 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4518 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4520 /* If the constructor is empty, clear the union. */
4521 if (! CONSTRUCTOR_ELTS (exp
) && ! cleared
)
4522 clear_storage (target
, expr_size (exp
), TYPE_ALIGN (type
));
      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  Don't do this if TARGET is a
	 register whose mode size isn't equal to SIZE since clear_storage
	 can't handle this case.  */
      else if (size > 0
	       && ((list_length (CONSTRUCTOR_ELTS (exp))
		    != fields_length (type))
		   || mostly_zeros_p (exp))
	       && (GET_CODE (target) != REG
		   || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		      == size))
	clear_storage (target, GEN_INT (size), align);
4556 /* Inform later passes that the old value is dead. */
4557 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4559 /* Store each element of the constructor into
4560 the corresponding field of TARGET. */
4562 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4564 register tree field
= TREE_PURPOSE (elt
);
4565 #ifdef WORD_REGISTER_OPERATIONS
4566 tree value
= TREE_VALUE (elt
);
4568 register enum machine_mode mode
;
4569 HOST_WIDE_INT bitsize
;
4570 HOST_WIDE_INT bitpos
= 0;
4573 rtx to_rtx
= target
;
	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
4581 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
4584 if (host_integerp (DECL_SIZE (field
), 1))
4585 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4589 unsignedp
= TREE_UNSIGNED (field
);
4590 mode
= DECL_MODE (field
);
4591 if (DECL_BIT_FIELD (field
))
4594 offset
= DECL_FIELD_OFFSET (field
);
4595 if (host_integerp (offset
, 0)
4596 && host_integerp (bit_position (field
), 0))
4598 bitpos
= int_bit_position (field
);
4602 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4608 if (contains_placeholder_p (offset
))
4609 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4610 offset
, make_tree (TREE_TYPE (exp
), target
));
4612 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4613 if (GET_CODE (to_rtx
) != MEM
)
4616 if (GET_MODE (offset_rtx
) != ptr_mode
)
4618 #ifdef POINTERS_EXTEND_UNSIGNED
4619 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
4621 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4626 = change_address (to_rtx
, VOIDmode
,
4627 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
4628 force_reg (ptr_mode
,
4630 align
= DECL_OFFSET_ALIGN (field
);
4633 if (TREE_READONLY (field
))
4635 if (GET_CODE (to_rtx
) == MEM
)
4636 to_rtx
= copy_rtx (to_rtx
);
4638 RTX_UNCHANGING_P (to_rtx
) = 1;
4641 #ifdef WORD_REGISTER_OPERATIONS
	  /* If this initializes a field that is smaller than a word, at the
	     start of a word, try to widen it to a full word.
	     This special case allows us to output C++ member function
	     initializations in a form that the optimizers can understand.  */
4646 if (GET_CODE (target
) == REG
4647 && bitsize
< BITS_PER_WORD
4648 && bitpos
% BITS_PER_WORD
== 0
4649 && GET_MODE_CLASS (mode
) == MODE_INT
4650 && TREE_CODE (value
) == INTEGER_CST
4652 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4654 tree type
= TREE_TYPE (value
);
4655 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4657 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4658 value
= convert (type
, value
);
4660 if (BYTES_BIG_ENDIAN
)
4662 = fold (build (LSHIFT_EXPR
, type
, value
,
4663 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4664 bitsize
= BITS_PER_WORD
;
4668 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4669 TREE_VALUE (elt
), type
, align
, cleared
,
4670 (DECL_NONADDRESSABLE_P (field
)
4671 && GET_CODE (to_rtx
) == MEM
)
4672 ? MEM_ALIAS_SET (to_rtx
)
4673 : get_alias_set (TREE_TYPE (field
)));
4676 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4681 tree domain
= TYPE_DOMAIN (type
);
4682 tree elttype
= TREE_TYPE (type
);
4683 int const_bounds_p
= (host_integerp (TYPE_MIN_VALUE (domain
), 0)
4684 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4685 HOST_WIDE_INT minelt
;
4686 HOST_WIDE_INT maxelt
;
4688 /* If we have constant bounds for the range of the type, get them. */
4691 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4692 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
4698 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4702 HOST_WIDE_INT count
= 0, zero_count
= 0;
4703 need_to_clear
= ! const_bounds_p
;
	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
4708 for (elt
= CONSTRUCTOR_ELTS (exp
);
4709 elt
!= NULL_TREE
&& ! need_to_clear
;
4710 elt
= TREE_CHAIN (elt
))
4712 tree index
= TREE_PURPOSE (elt
);
4713 HOST_WIDE_INT this_node_count
;
4715 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4717 tree lo_index
= TREE_OPERAND (index
, 0);
4718 tree hi_index
= TREE_OPERAND (index
, 1);
4720 if (! host_integerp (lo_index
, 1)
4721 || ! host_integerp (hi_index
, 1))
4727 this_node_count
= (tree_low_cst (hi_index
, 1)
4728 - tree_low_cst (lo_index
, 1) + 1);
4731 this_node_count
= 1;
4733 count
+= this_node_count
;
4734 if (mostly_zeros_p (TREE_VALUE (elt
)))
4735 zero_count
+= this_node_count
;
	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1
		  || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
4745 if (need_to_clear
&& size
> 0)
4748 clear_storage (target
, GEN_INT (size
), align
);
4752 /* Inform later passes that the old value is dead. */
4753 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4755 /* Store each element of the constructor into
4756 the corresponding element of TARGET, determined
4757 by counting the elements. */
4758 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4760 elt
= TREE_CHAIN (elt
), i
++)
4762 register enum machine_mode mode
;
4763 HOST_WIDE_INT bitsize
;
4764 HOST_WIDE_INT bitpos
;
4766 tree value
= TREE_VALUE (elt
);
4767 unsigned int align
= TYPE_ALIGN (TREE_TYPE (value
));
4768 tree index
= TREE_PURPOSE (elt
);
4769 rtx xtarget
= target
;
4771 if (cleared
&& is_zeros_p (value
))
4774 unsignedp
= TREE_UNSIGNED (elttype
);
4775 mode
= TYPE_MODE (elttype
);
4776 if (mode
== BLKmode
)
4777 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4778 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4781 bitsize
= GET_MODE_BITSIZE (mode
);
4783 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4785 tree lo_index
= TREE_OPERAND (index
, 0);
4786 tree hi_index
= TREE_OPERAND (index
, 1);
4787 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
4788 struct nesting
*loop
;
4789 HOST_WIDE_INT lo
, hi
, count
;
4792 /* If the range is constant and "small", unroll the loop. */
4794 && host_integerp (lo_index
, 0)
4795 && host_integerp (hi_index
, 0)
4796 && (lo
= tree_low_cst (lo_index
, 0),
4797 hi
= tree_low_cst (hi_index
, 0),
4798 count
= hi
- lo
+ 1,
4799 (GET_CODE (target
) != MEM
4801 || (host_integerp (TYPE_SIZE (elttype
), 1)
4802 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4805 lo
-= minelt
; hi
-= minelt
;
4806 for (; lo
<= hi
; lo
++)
4808 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4809 store_constructor_field
4810 (target
, bitsize
, bitpos
, mode
, value
, type
, align
,
4812 TYPE_NONALIASED_COMPONENT (type
)
4813 ? MEM_ALIAS_SET (target
) : get_alias_set (elttype
));
4818 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4819 loop_top
= gen_label_rtx ();
4820 loop_end
= gen_label_rtx ();
4822 unsignedp
= TREE_UNSIGNED (domain
);
4824 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4827 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4829 SET_DECL_RTL (index
, index_r
);
4830 if (TREE_CODE (value
) == SAVE_EXPR
4831 && SAVE_EXPR_RTL (value
) == 0)
4833 /* Make sure value gets expanded once before the
4835 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4838 store_expr (lo_index
, index_r
, 0);
4839 loop
= expand_start_loop (0);
4841 /* Assign value to element index. */
4843 = convert (ssizetype
,
4844 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4845 index
, TYPE_MIN_VALUE (domain
))));
4846 position
= size_binop (MULT_EXPR
, position
,
4848 TYPE_SIZE_UNIT (elttype
)));
4850 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4851 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4852 xtarget
= change_address (target
, mode
, addr
);
4853 if (TREE_CODE (value
) == CONSTRUCTOR
)
4854 store_constructor (value
, xtarget
, align
, cleared
,
4855 bitsize
/ BITS_PER_UNIT
);
4857 store_expr (value
, xtarget
, 0);
4859 expand_exit_loop_if_false (loop
,
4860 build (LT_EXPR
, integer_type_node
,
4863 expand_increment (build (PREINCREMENT_EXPR
,
4865 index
, integer_one_node
), 0, 0);
4867 emit_label (loop_end
);
4870 else if ((index
!= 0 && ! host_integerp (index
, 0))
4871 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4877 index
= ssize_int (1);
4880 index
= convert (ssizetype
,
4881 fold (build (MINUS_EXPR
, index
,
4882 TYPE_MIN_VALUE (domain
))));
4884 position
= size_binop (MULT_EXPR
, index
,
4886 TYPE_SIZE_UNIT (elttype
)));
4887 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4888 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4889 xtarget
= change_address (target
, mode
, addr
);
4890 store_expr (value
, xtarget
, 0);
4895 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4896 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4898 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4900 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4901 type
, align
, cleared
,
4902 TYPE_NONALIASED_COMPONENT (type
)
4903 && GET_CODE (target
) == MEM
4904 ? MEM_ALIAS_SET (target
) :
4905 get_alias_set (elttype
));
4911 /* Set constructor assignments. */
4912 else if (TREE_CODE (type
) == SET_TYPE
)
4914 tree elt
= CONSTRUCTOR_ELTS (exp
);
4915 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4916 tree domain
= TYPE_DOMAIN (type
);
4917 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset), and then
	 set the bits we want.  */
4929 /* Check for all zeros. */
4930 if (elt
== NULL_TREE
&& size
> 0)
4933 clear_storage (target
, GEN_INT (size
), TYPE_ALIGN (type
));
4937 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4938 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4939 bitlength
= size_binop (PLUS_EXPR
,
4940 size_diffop (domain_max
, domain_min
),
4943 nbits
= tree_low_cst (bitlength
, 1);
4945 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4946 are "complicated" (more than one range), initialize (the
4947 constant parts) by copying from a constant. */
4948 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4949 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4951 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4952 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4953 char *bit_buffer
= (char *) alloca (nbits
);
4954 HOST_WIDE_INT word
= 0;
4955 unsigned int bit_pos
= 0;
4956 unsigned int ibit
= 0;
4957 unsigned int offset
= 0; /* In bytes from beginning of set. */
4959 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4962 if (bit_buffer
[ibit
])
4964 if (BYTES_BIG_ENDIAN
)
4965 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4967 word
|= 1 << bit_pos
;
4971 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4973 if (word
!= 0 || ! cleared
)
4975 rtx datum
= GEN_INT (word
);
4978 /* The assumption here is that it is safe to use
4979 XEXP if the set is multi-word, but not if
4980 it's single-word. */
4981 if (GET_CODE (target
) == MEM
)
4983 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
4984 to_rtx
= change_address (target
, mode
, to_rtx
);
4986 else if (offset
== 0)
4990 emit_move_insn (to_rtx
, datum
);
4997 offset
+= set_word_size
/ BITS_PER_UNIT
;
5002 /* Don't bother clearing storage if the set is all ones. */
5003 if (TREE_CHAIN (elt
) != NULL_TREE
5004 || (TREE_PURPOSE (elt
) == NULL_TREE
5006 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5007 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5008 || (tree_low_cst (TREE_VALUE (elt
), 0)
5009 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5010 != (HOST_WIDE_INT
) nbits
))))
5011 clear_storage (target
, expr_size (exp
), TYPE_ALIGN (type
));
5013 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5015 /* Start of range of element or NULL. */
5016 tree startbit
= TREE_PURPOSE (elt
);
5017 /* End of range of element, or element value. */
5018 tree endbit
= TREE_VALUE (elt
);
5019 #ifdef TARGET_MEM_FUNCTIONS
5020 HOST_WIDE_INT startb
, endb
;
5022 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5024 bitlength_rtx
= expand_expr (bitlength
,
5025 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5027 /* Handle non-range tuple element like [ expr ]. */
5028 if (startbit
== NULL_TREE
)
5030 startbit
= save_expr (endbit
);
5034 startbit
= convert (sizetype
, startbit
);
5035 endbit
= convert (sizetype
, endbit
);
5036 if (! integer_zerop (domain_min
))
5038 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5039 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5041 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5042 EXPAND_CONST_ADDRESS
);
5043 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5044 EXPAND_CONST_ADDRESS
);
5050 ((build_qualified_type (type_for_mode (GET_MODE (target
), 0),
5053 emit_move_insn (targetx
, target
);
5056 else if (GET_CODE (target
) == MEM
)
5061 #ifdef TARGET_MEM_FUNCTIONS
		  /* Optimization: If startbit and endbit are
		     constants divisible by BITS_PER_UNIT,
		     call memset instead.  */
5065 if (TREE_CODE (startbit
) == INTEGER_CST
5066 && TREE_CODE (endbit
) == INTEGER_CST
5067 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5068 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5070 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5072 plus_constant (XEXP (targetx
, 0),
5073 startb
/ BITS_PER_UNIT
),
5075 constm1_rtx
, TYPE_MODE (integer_type_node
),
5076 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5077 TYPE_MODE (sizetype
));
5081 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
5082 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
5083 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5084 startbit_rtx
, TYPE_MODE (sizetype
),
5085 endbit_rtx
, TYPE_MODE (sizetype
));
5088 emit_move_insn (target
, targetx
);
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
5116 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
5117 unsignedp
, align
, total_size
, alias_set
)
5119 HOST_WIDE_INT bitsize
;
5120 HOST_WIDE_INT bitpos
;
5121 enum machine_mode mode
;
5123 enum machine_mode value_mode
;
5126 HOST_WIDE_INT total_size
;
5129 HOST_WIDE_INT width_mask
= 0;
5131 if (TREE_CODE (exp
) == ERROR_MARK
)
5134 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
5135 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */
5149 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5153 (build_qualified_type (type_for_mode (GET_MODE (target
), 0),
5156 rtx blk_object
= copy_rtx (object
);
5158 PUT_MODE (blk_object
, BLKmode
);
5160 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5161 emit_move_insn (object
, target
);
5163 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
5164 align
, total_size
, alias_set
);
5166 /* Even though we aren't returning target, we need to
5167 give it the updated value. */
5168 emit_move_insn (target
, object
);
5173 if (GET_CODE (target
) == CONCAT
)
5175 /* We're storing into a struct containing a single __complex. */
5179 return store_expr (exp
, target
, 0);
  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */
5186 if (mode
== VOIDmode
5187 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5188 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5189 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5190 || GET_CODE (target
) == REG
5191 || GET_CODE (target
) == SUBREG
5192 /* If the field isn't aligned enough to store as an ordinary memref,
5193 store it as a bit field. */
5194 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, align
)
5195 && (align
< GET_MODE_ALIGNMENT (mode
)
5196 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
5197 || (mode
== BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, align
)
5198 && (TYPE_ALIGN (TREE_TYPE (exp
)) > align
5199 || bitpos
% TYPE_ALIGN (TREE_TYPE (exp
)) != 0))
5200 /* If the RHS and field are a constant size and the size of the
5201 RHS isn't the same size as the bitfield, we must use bitfield
5204 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5205 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5207 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
5213 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5214 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
5215 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5216 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5217 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5221 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5223 if (mode
!= VOIDmode
&& mode
!= BLKmode
5224 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5225 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5227 /* If the modes of TARGET and TEMP are both BLKmode, both
5228 must be in memory and BITPOS must be aligned on a byte
5229 boundary. If so, we simply do a block copy. */
5230 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5232 unsigned int exp_align
= expr_align (exp
);
5234 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5235 || bitpos
% BITS_PER_UNIT
!= 0)
5238 target
= change_address (target
, VOIDmode
,
5239 plus_constant (XEXP (target
, 0),
5240 bitpos
/ BITS_PER_UNIT
));
5242 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5243 align
= MIN (exp_align
, align
);
5245 /* Find an alignment that is consistent with the bit position. */
5246 while ((bitpos
% align
) != 0)
5249 emit_block_move (target
, temp
,
5250 bitsize
== -1 ? expr_size (exp
)
5251 : GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5255 return value_mode
== VOIDmode
? const0_rtx
: target
;
5258 /* Store the value in the bitfield. */
5259 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
5260 if (value_mode
!= VOIDmode
)
5262 /* The caller wants an rtx for the value. */
5263 /* If possible, avoid refetching from the bitfield itself. */
5265 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5268 enum machine_mode tmode
;
5271 return expand_and (temp
,
5275 GET_MODE (temp
) == VOIDmode
5277 : GET_MODE (temp
))), NULL_RTX
);
5278 tmode
= GET_MODE (temp
);
5279 if (tmode
== VOIDmode
)
5281 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5282 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5283 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
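	  /* Otherwise the stored value cannot be recovered from TEMP, so
	     fetch it back out of the bit-field we just wrote.  */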
5285 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5286 NULL_RTX
, value_mode
, 0, align
,
5293 rtx addr
= XEXP (target
, 0);
5296 /* If a value is wanted, it must be the lhs;
5297 so make the address stable for multiple use. */
5299 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5300 && ! CONSTANT_ADDRESS_P (addr
)
5301 /* A frame-pointer reference is already stable. */
5302 && ! (GET_CODE (addr
) == PLUS
5303 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5304 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5305 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5306 addr
= copy_to_reg (addr
);
5308 /* Now build a reference to just the desired component. */
5310 to_rtx
= copy_rtx (change_address (target
, mode
,
5311 plus_constant (addr
,
5313 / BITS_PER_UNIT
))));
5314 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5315 MEM_ALIAS_SET (to_rtx
) = alias_set
;
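      /* Store the value into the component reference just built and hand
	 back whatever store_expr returns (the stored value, when one is
	 wanted).  */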
5317 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
5347 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5348 punsignedp
, pvolatilep
, palignment
)
5350 HOST_WIDE_INT
*pbitsize
;
5351 HOST_WIDE_INT
*pbitpos
;
5353 enum machine_mode
*pmode
;
5356 unsigned int *palignment
;
5359 enum machine_mode mode
= VOIDmode
;
5360 tree offset
= size_zero_node
;
5361 tree bit_offset
= bitsize_zero_node
;
5362 unsigned int alignment
= BIGGEST_ALIGNMENT
;
5365 /* First get the mode, signedness, and size. We do this from just the
5366 outermost expression. */
5367 if (TREE_CODE (exp
) == COMPONENT_REF
)
5369 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5370 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5371 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5373 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5375 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5377 size_tree
= TREE_OPERAND (exp
, 1);
5378 *punsignedp
= TREE_UNSIGNED (exp
);
5382 mode
= TYPE_MODE (TREE_TYPE (exp
));
5383 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5385 if (mode
== BLKmode
)
5386 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5388 *pbitsize
= GET_MODE_BITSIZE (mode
);
5393 if (! host_integerp (size_tree
, 1))
5394 mode
= BLKmode
, *pbitsize
= -1;
5396 *pbitsize
= tree_low_cst (size_tree
, 1);
5399 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5400 and find the ultimate containing object. */
5403 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5404 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5405 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5407 tree field
= TREE_OPERAND (exp
, 1);
5408 tree this_offset
= DECL_FIELD_OFFSET (field
);
5410 /* If this field hasn't been filled in yet, don't go
5411 past it. This should only happen when folding expressions
5412 made during type construction. */
5413 if (this_offset
== 0)
5415 else if (! TREE_CONSTANT (this_offset
)
5416 && contains_placeholder_p (this_offset
))
5417 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5419 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5420 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5421 DECL_FIELD_BIT_OFFSET (field
));
5423 if (! host_integerp (offset
, 0))
5424 alignment
= MIN (alignment
, DECL_OFFSET_ALIGN (field
));
5427 else if (TREE_CODE (exp
) == ARRAY_REF
)
5429 tree index
= TREE_OPERAND (exp
, 1);
5430 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5431 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5432 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (exp
));
	  /* We assume all arrays have sizes that are a multiple of a byte.
	     First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     array element.  */
5438 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5439 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5442 /* If the index has a self-referential type, pass it to a
5443 WITH_RECORD_EXPR; if the component size is, pass our
5444 component to one. */
5445 if (! TREE_CONSTANT (index
)
5446 && contains_placeholder_p (index
))
5447 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5448 if (! TREE_CONSTANT (unit_size
)
5449 && contains_placeholder_p (unit_size
))
5450 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
,
5451 TREE_OPERAND (exp
, 0));
5453 offset
= size_binop (PLUS_EXPR
, offset
,
5454 size_binop (MULT_EXPR
,
5455 convert (sizetype
, index
),
5459 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5460 && ! ((TREE_CODE (exp
) == NOP_EXPR
5461 || TREE_CODE (exp
) == CONVERT_EXPR
)
5462 && (TYPE_MODE (TREE_TYPE (exp
))
5463 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5466 /* If any reference in the chain is volatile, the effect is volatile. */
5467 if (TREE_THIS_VOLATILE (exp
))
5470 /* If the offset is non-constant already, then we can't assume any
5471 alignment more than the alignment here. */
5472 if (! TREE_CONSTANT (offset
))
5473 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5475 exp
= TREE_OPERAND (exp
, 0);
5479 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
5480 else if (TREE_TYPE (exp
) != 0)
5481 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5483 /* If OFFSET is constant, see if we can return the whole thing as a
5484 constant bit position. Otherwise, split it up. */
5485 if (host_integerp (offset
, 0)
5486 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5488 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5489 && host_integerp (tem
, 0))
5490 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
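  /* Otherwise report the constant part of the position in *PBITPOS and
     the variable byte offset in *POFFSET.  */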
5492 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5495 *palignment
= alignment
;
/* Subroutine of expand_expr: compute memory_usage from modifier.  */
5501 static enum memory_use_mode
5502 get_memory_usage_from_modifier (modifier
)
5503 enum expand_modifier modifier
;
5509 return MEMORY_USE_RO
;
5511 case EXPAND_MEMORY_USE_WO
:
5512 return MEMORY_USE_WO
;
5514 case EXPAND_MEMORY_USE_RW
:
5515 return MEMORY_USE_RW
;
5517 case EXPAND_MEMORY_USE_DONT
:
5518 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5519 MEMORY_USE_DONT, because they are modifiers to a call of
5520 expand_expr in the ADDR_EXPR case of expand_expr. */
5521 case EXPAND_CONST_ADDRESS
:
5522 case EXPAND_INITIALIZER
:
5523 return MEMORY_USE_DONT
;
5524 case EXPAND_MEMORY_USE_BAD
:
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
5538 force_operand (value
, target
)
5541 register optab binoptab
= 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  register rtx tmp;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
5547 register rtx subtarget
= get_subtarget (target
);
5549 /* Check for a PIC address load. */
5551 && (GET_CODE (value
) == PLUS
|| GET_CODE (value
) == MINUS
)
5552 && XEXP (value
, 0) == pic_offset_table_rtx
5553 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5554 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5555 || GET_CODE (XEXP (value
, 1)) == CONST
))
5558 subtarget
= gen_reg_rtx (GET_MODE (value
));
5559 emit_move_insn (subtarget
, value
);
5563 if (GET_CODE (value
) == PLUS
)
5564 binoptab
= add_optab
;
5565 else if (GET_CODE (value
) == MINUS
)
5566 binoptab
= sub_optab
;
5567 else if (GET_CODE (value
) == MULT
)
5569 op2
= XEXP (value
, 1);
5570 if (!CONSTANT_P (op2
)
5571 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5573 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5574 return expand_mult (GET_MODE (value
), tmp
,
5575 force_operand (op2
, NULL_RTX
),
5581 op2
= XEXP (value
, 1);
5582 if (!CONSTANT_P (op2
)
5583 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5585 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
5587 binoptab
= add_optab
;
5588 op2
= negate_rtx (GET_MODE (value
), op2
);
  /* Check for an addition with OP2 a constant integer and our first
     operand a PLUS of a virtual register and something else.  In that
     case, we want to emit the sum of the virtual register and the
     constant first and then add the other value.  This allows virtual
     register instantiation to simply modify the constant rather than
     creating another one around this addition.  */
5597 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
5598 && GET_CODE (XEXP (value
, 0)) == PLUS
5599 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5600 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5601 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5603 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
5604 XEXP (XEXP (value
, 0), 0), op2
,
5605 subtarget
, 0, OPTAB_LIB_WIDEN
);
5606 return expand_binop (GET_MODE (value
), binoptab
, temp
,
5607 force_operand (XEXP (XEXP (value
, 0), 1), 0),
5608 target
, 0, OPTAB_LIB_WIDEN
);
5611 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5612 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
5613 force_operand (op2
, NULL_RTX
),
5614 target
, 0, OPTAB_LIB_WIDEN
);
5615 /* We give UNSIGNEDP = 0 to expand_binop
5616 because the only operations we are expanding here are signed ones. */
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */
5627 save_noncopied_parts (lhs
, list
)
5634 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
5635 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
5636 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
5639 tree part
= TREE_VALUE (tail
);
5640 tree part_type
= TREE_TYPE (part
);
5641 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
5643 = assign_temp (build_qualified_type (part_type
,
5644 (TYPE_QUALS (part_type
)
5645 | TYPE_QUAL_CONST
)),
5648 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
5649 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
5650 parts
= tree_cons (to_be_saved
,
5651 build (RTL_EXPR
, part_type
, NULL_TREE
,
5654 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */
5664 init_noncopied_parts (lhs
, list
)
5671 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
5672 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
5673 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
5674 else if (TREE_PURPOSE (tail
))
5676 tree part
= TREE_VALUE (tail
);
5677 tree part_type
= TREE_TYPE (part
);
5678 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
5679 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
5693 safe_from_p (x
, exp
, top_p
)
5700 static tree save_expr_list
;
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
5709 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5710 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5711 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5712 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5713 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5715 && GET_MODE (x
) == BLKmode
)
5716 /* If X is in the outgoing argument area, it is always safe. */
5717 || (GET_CODE (x
) == MEM
5718 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5719 || (GET_CODE (XEXP (x
, 0)) == PLUS
5720 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5723 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5724 find the underlying pseudo. */
5725 if (GET_CODE (x
) == SUBREG
)
5728 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */
5748 rtn
= safe_from_p (x
, exp
, 0);
5750 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5751 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5756 /* Now look at our tree code and possibly recurse. */
5757 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5760 exp_rtl
= DECL_RTL_SET_P (exp
) ? DECL_RTL (exp
) : NULL_RTX
;
5767 if (TREE_CODE (exp
) == TREE_LIST
)
5768 return ((TREE_VALUE (exp
) == 0
5769 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5770 && (TREE_CHAIN (exp
) == 0
5771 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5772 else if (TREE_CODE (exp
) == ERROR_MARK
)
5773 return 1; /* An already-visited SAVE_EXPR? */
5778 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5782 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5783 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5787 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5788 the expression. If it is set, we conflict iff we are that rtx or
5789 both are in memory. Otherwise, we check all operands of the
5790 expression recursively. */
5792 switch (TREE_CODE (exp
))
5795 return (staticp (TREE_OPERAND (exp
, 0))
5796 || TREE_STATIC (exp
)
5797 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0));
5800 if (GET_CODE (x
) == MEM
5801 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5802 get_alias_set (exp
)))
5807 /* Assume that the call will clobber all hard registers and
5809 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5810 || GET_CODE (x
) == MEM
)
5815 /* If a sequence exists, we would have to scan every instruction
5816 in the sequence to see if it was safe. This is probably not
5818 if (RTL_EXPR_SEQUENCE (exp
))
5821 exp_rtl
= RTL_EXPR_RTL (exp
);
5824 case WITH_CLEANUP_EXPR
:
5825 exp_rtl
= RTL_EXPR_RTL (exp
);
5828 case CLEANUP_POINT_EXPR
:
5829 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5832 exp_rtl
= SAVE_EXPR_RTL (exp
);
5836 /* If we've already scanned this, don't do it again. Otherwise,
5837 show we've scanned it and record for clearing the flag if we're
5839 if (TREE_PRIVATE (exp
))
5842 TREE_PRIVATE (exp
) = 1;
5843 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5845 TREE_PRIVATE (exp
) = 0;
5849 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5853 /* The only operand we look at is operand 1. The rest aren't
5854 part of the expression. */
5855 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5857 case METHOD_CALL_EXPR
:
5858 /* This takes a rtx argument, but shouldn't appear here. */
5865 /* If we have an rtx, we do not need to scan our operands. */
5869 nops
= first_rtl_op (TREE_CODE (exp
));
5870 for (i
= 0; i
< nops
; i
++)
5871 if (TREE_OPERAND (exp
, i
) != 0
5872 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5875 /* If this is a language-specific tree code, it may require
5876 special handling. */
5877 if ((unsigned int) TREE_CODE (exp
)
5878 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5880 && !(*lang_safe_from_p
) (x
, exp
))
5884 /* If we have an rtl, find any enclosed object. Then see if we conflict
5888 if (GET_CODE (exp_rtl
) == SUBREG
)
5890 exp_rtl
= SUBREG_REG (exp_rtl
);
5891 if (GET_CODE (exp_rtl
) == REG
5892 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5896 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5897 are memory and they conflict. */
5898 return ! (rtx_equal_p (x
, exp_rtl
)
5899 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5900 && true_dependence (exp_rtl
, GET_MODE (x
), x
,
5901 rtx_addr_varies_p
)));
5904 /* If we reach here, it is safe. */
5908 /* Subroutine of expand_expr: return nonzero iff EXP is an
5909 expression whose type is statically determinable. */
5915 if (TREE_CODE (exp
) == PARM_DECL
5916 || TREE_CODE (exp
) == VAR_DECL
5917 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
5918 || TREE_CODE (exp
) == COMPONENT_REF
5919 || TREE_CODE (exp
) == ARRAY_REF
)
5924 /* Subroutine of expand_expr: return rtx if EXP is a
5925 variable or parameter; else return 0. */
5932 switch (TREE_CODE (exp
))
5936 return DECL_RTL (exp
);
5942 #ifdef MAX_INTEGER_COMPUTATION_MODE
5945 check_max_integer_computation_mode (exp
)
5948 enum tree_code code
;
5949 enum machine_mode mode
;
5951 /* Strip any NOPs that don't change the mode. */
5953 code
= TREE_CODE (exp
);
5955 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5956 if (code
== NOP_EXPR
5957 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5960 /* First check the type of the overall operation. We need only look at
5961 unary, binary and relational operations. */
5962 if (TREE_CODE_CLASS (code
) == '1'
5963 || TREE_CODE_CLASS (code
) == '2'
5964 || TREE_CODE_CLASS (code
) == '<')
5966 mode
= TYPE_MODE (TREE_TYPE (exp
));
5967 if (GET_MODE_CLASS (mode
) == MODE_INT
5968 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5969 internal_error ("unsupported wide integer operation");
5972 /* Check operand of a unary op. */
5973 if (TREE_CODE_CLASS (code
) == '1')
5975 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5976 if (GET_MODE_CLASS (mode
) == MODE_INT
5977 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5978 internal_error ("unsupported wide integer operation");
5981 /* Check operands of a binary/comparison op. */
5982 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
5984 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5985 if (GET_MODE_CLASS (mode
) == MODE_INT
5986 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5987 internal_error ("unsupported wide integer operation");
5989 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
5990 if (GET_MODE_CLASS (mode
) == MODE_INT
5991 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5992 internal_error ("unsupported wide integer operation");
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
6033 expand_expr (exp
, target
, tmode
, modifier
)
6036 enum machine_mode tmode
;
6037 enum expand_modifier modifier
;
6039 register rtx op0
, op1
, temp
;
6040 tree type
= TREE_TYPE (exp
);
6041 int unsignedp
= TREE_UNSIGNED (type
);
6042 register enum machine_mode mode
;
6043 register enum tree_code code
= TREE_CODE (exp
);
6045 rtx subtarget
, original_target
;
6048 /* Used by check-memory-usage to make modifier read only. */
6049 enum expand_modifier ro_modifier
;
6051 /* Handle ERROR_MARK before anybody tries to access its type. */
6052 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
6054 op0
= CONST0_RTX (tmode
);
6060 mode
= TYPE_MODE (type
);
6061 /* Use subtarget as the target for operand 0 of a binary operation. */
6062 subtarget
= get_subtarget (target
);
6063 original_target
= target
;
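  /* Decide whether the value of this expression will be ignored: either
     the caller asked for no value by passing const0_rtx as the target, or
     the expression has void type and is a code whose value is never used.  */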
6064 ignore
= (target
== const0_rtx
6065 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
6066 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
6067 || code
== COND_EXPR
)
6068 && TREE_CODE (type
) == VOID_TYPE
));
6070 /* Make a read-only version of the modifier. */
6071 if (modifier
== EXPAND_NORMAL
|| modifier
== EXPAND_SUM
6072 || modifier
== EXPAND_CONST_ADDRESS
|| modifier
== EXPAND_INITIALIZER
)
6073 ro_modifier
= modifier
;
6075 ro_modifier
= EXPAND_NORMAL
;
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6085 if (! TREE_SIDE_EFFECTS (exp
))
6088 /* Ensure we reference a volatile object even if value is ignored, but
6089 don't do this if all we are doing is taking its address. */
6090 if (TREE_THIS_VOLATILE (exp
)
6091 && TREE_CODE (exp
) != FUNCTION_DECL
6092 && mode
!= VOIDmode
&& mode
!= BLKmode
6093 && modifier
!= EXPAND_CONST_ADDRESS
)
6095 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, ro_modifier
);
6096 if (GET_CODE (temp
) == MEM
)
6097 temp
= copy_to_reg (temp
);
6101 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6102 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6103 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
6104 VOIDmode
, ro_modifier
);
6105 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6106 || code
== ARRAY_REF
)
6108 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
6109 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
6112 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6113 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
6114 /* If the second operand has no side effects, just evaluate
6116 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
6117 VOIDmode
, ro_modifier
);
6118 else if (code
== BIT_FIELD_REF
)
6120 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
6121 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
6122 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, ro_modifier
);
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;
  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            p->expr->x_forced_labels
              = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
                                   p->expr->x_forced_labels);
          }
        else
          {
            if (modifier == EXPAND_INITIALIZER)
              forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                                 label_rtx (exp),
                                                 forced_labels);
          }

        temp = gen_rtx_MEM (FUNCTION_MODE,
                            gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
        }

      /* Although static-storage variables start off initialized, according to
         ANSI C, a memcpy could overwrite them with uninitialized values.  So
         we check them too.  This also lets us check for read-only variables
         accessed via a non-const declaration, in case it won't be detected
         any other way (e.g., in an embedded system or OS kernel without
         memory protection).

         Aggregates are not checked here; they're handled elsewhere.  */
      if (cfun && current_function_check_memory_usage
          && GET_CODE (DECL_RTL (exp)) == MEM
          && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
        {
          enum memory_use_mode memory_usage;
          memory_usage = get_memory_usage_from_modifier (modifier);

          in_check_memory_usage = 1;
          if (memory_usage != MEMORY_USE_DONT)
            emit_library_call (chkr_check_addr_libfunc,
                               LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                               XEXP (DECL_RTL (exp), 0), Pmode,
                               GEN_INT (int_size_in_bytes (type)),
                               TYPE_MODE (sizetype),
                               GEN_INT (memory_usage),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }

      /* ... fall through ...  */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = change_address (addr, Pmode,
                                   fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);

          temp = change_address (DECL_RTL (exp), mode, addr);
        }
      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = change_address (DECL_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }
      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
            abort ();

          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
                          EXPAND_MEMORY_USE_BAD);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);
    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        const char *saved_input_filename = input_filename;
        int saved_lineno = lineno;
        input_filename = EXPR_WFL_FILENAME (exp);
        lineno = EXPR_WFL_LINENO (exp);
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_filename, lineno);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
        input_filename = saved_input_filename;
        lineno = saved_lineno;
        return to_return;
      }
6420 context
= decl_function_context (exp
);
6422 /* If this SAVE_EXPR was at global context, assume we are an
6423 initialization function and move it into our context. */
6425 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6427 /* We treat inline_function_decl as an alias for the current function
6428 because that is the inline function whose vars, types, etc.
6429 are being merged into the current function.
6430 See expand_inline_function. */
6431 if (context
== current_function_decl
|| context
== inline_function_decl
)
6434 /* If this is non-local, handle it. */
6437 /* The following call just exists to abort if the context is
6438 not of a containing function. */
6439 find_function_data (context
);
6441 temp
= SAVE_EXPR_RTL (exp
);
6442 if (temp
&& GET_CODE (temp
) == REG
)
6444 put_var_into_stack (exp
);
6445 temp
= SAVE_EXPR_RTL (exp
);
6447 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6449 return change_address (temp
, mode
,
6450 fix_lexical_addr (XEXP (temp
, 0), exp
));
6452 if (SAVE_EXPR_RTL (exp
) == 0)
6454 if (mode
== VOIDmode
)
6457 temp
= assign_temp (build_qualified_type (type
,
6459 | TYPE_QUAL_CONST
)),
6462 SAVE_EXPR_RTL (exp
) = temp
;
6463 if (!optimize
&& GET_CODE (temp
) == REG
)
6464 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
      /* If the mode of TEMP does not match that of the expression, it
         must be a promoted value.  We pass store_expr a SUBREG of the
         wanted mode but mark it so that we know that it was already
         extended.  Note that `unsignedp' was modified above in
         this case.  */
6473 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6475 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6476 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6477 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6480 if (temp
== const0_rtx
)
6481 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6482 EXPAND_MEMORY_USE_BAD
);
6484 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
6486 TREE_USED (exp
) = 1;
      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */
6493 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6494 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6496 /* Compute the signedness and make the proper SUBREG. */
6497 promote_mode (type
, mode
, &unsignedp
, 0);
6498 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6499 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6500 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6504 return SAVE_EXPR_RTL (exp
);
6509 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6510 TREE_OPERAND (exp
, 0) = unsave_expr_now (TREE_OPERAND (exp
, 0));
6514 case PLACEHOLDER_EXPR
:
6516 tree placeholder_expr
;
        /* If there is an object on the head of the placeholder list,
           see if some object in it of type TYPE or a pointer to it.  For
           further information, see tree.def.  */
6521 for (placeholder_expr
= placeholder_list
;
6522 placeholder_expr
!= 0;
6523 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
6525 tree need_type
= TYPE_MAIN_VARIANT (type
);
6527 tree old_list
= placeholder_list
;
            /* Find the outermost reference that is of the type we want.
               If none, see if any object has a type that is a pointer to
               the type we want.  */
6533 for (elt
= TREE_PURPOSE (placeholder_expr
);
6534 elt
!= 0 && object
== 0;
6536 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6537 || TREE_CODE (elt
) == COND_EXPR
)
6538 ? TREE_OPERAND (elt
, 1)
6539 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6540 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6541 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6542 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6543 ? TREE_OPERAND (elt
, 0) : 0))
6544 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
6547 for (elt
= TREE_PURPOSE (placeholder_expr
);
6548 elt
!= 0 && object
== 0;
6550 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6551 || TREE_CODE (elt
) == COND_EXPR
)
6552 ? TREE_OPERAND (elt
, 1)
6553 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6554 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6555 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6556 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6557 ? TREE_OPERAND (elt
, 0) : 0))
6558 if (POINTER_TYPE_P (TREE_TYPE (elt
))
6559 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
6561 object
= build1 (INDIRECT_REF
, need_type
, elt
);
              /* Expand this object skipping the list entries before
                 it was found in case it is also a PLACEHOLDER_EXPR.
                 In that case, we want to translate it using subsequent
                 entries.  */
6569 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6570 temp
= expand_expr (object
, original_target
, tmode
,
6572 placeholder_list
= old_list
;
6578 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
6584 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6586 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
6587 tmode
, ro_modifier
);
6588 placeholder_list
= TREE_CHAIN (placeholder_list
);
6592 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6593 expand_goto (TREE_OPERAND (exp
, 0));
6595 expand_computed_goto (TREE_OPERAND (exp
, 0));
6599 expand_exit_loop_if_false (NULL_PTR
,
6600 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6603 case LABELED_BLOCK_EXPR
:
6604 if (LABELED_BLOCK_BODY (exp
))
6605 expand_expr_stmt (LABELED_BLOCK_BODY (exp
));
6606 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6609 case EXIT_BLOCK_EXPR
:
6610 if (EXIT_BLOCK_RETURN (exp
))
6611 sorry ("returned value in block_exit_expr");
6612 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6617 expand_start_loop (1);
6618 expand_expr_stmt (TREE_OPERAND (exp
, 0));
6626 tree vars
= TREE_OPERAND (exp
, 0);
6627 int vars_need_expansion
= 0;
6629 /* Need to open a binding contour here because
6630 if there are any cleanups they must be contained here. */
6631 expand_start_bindings (2);
6633 /* Mark the corresponding BLOCK for output in its proper place. */
6634 if (TREE_OPERAND (exp
, 2) != 0
6635 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6636 insert_block (TREE_OPERAND (exp
, 2));
6638 /* If VARS have not yet been expanded, expand them now. */
6641 if (!DECL_RTL_SET_P (vars
))
6643 vars_need_expansion
= 1;
6646 expand_decl_init (vars
);
6647 vars
= TREE_CHAIN (vars
);
6650 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, ro_modifier
);
6652 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6658 if (RTL_EXPR_SEQUENCE (exp
))
6660 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6662 emit_insns (RTL_EXPR_SEQUENCE (exp
));
6663 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6665 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6666 free_temps_for_rtl_expr (exp
);
6667 return RTL_EXPR_RTL (exp
);
6670 /* If we don't need the result, just ensure we evaluate any
6675 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6676 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
,
6677 EXPAND_MEMORY_USE_BAD
);
      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.  */
6688 else if ((TREE_STATIC (exp
)
6689 && ((mode
== BLKmode
6690 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6691 || TREE_ADDRESSABLE (exp
)
6692 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6693 && (! MOVE_BY_PIECES_P
6694 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6696 && ! mostly_zeros_p (exp
))))
6697 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
6699 rtx constructor
= output_constant_def (exp
, 1);
6701 if (modifier
!= EXPAND_CONST_ADDRESS
6702 && modifier
!= EXPAND_INITIALIZER
6703 && modifier
!= EXPAND_SUM
6704 && (! memory_address_p (GET_MODE (constructor
),
6705 XEXP (constructor
, 0))
6707 && GET_CODE (XEXP (constructor
, 0)) != REG
)))
6708 constructor
= change_address (constructor
, VOIDmode
,
6709 XEXP (constructor
, 0));
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
6716 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6717 || GET_CODE (target
) == PARALLEL
)
6719 = assign_temp (build_qualified_type (type
,
6721 | (TREE_READONLY (exp
)
6722 * TYPE_QUAL_CONST
))),
6723 TREE_ADDRESSABLE (exp
), 1, 1);
6725 store_constructor (exp
, target
, TYPE_ALIGN (TREE_TYPE (exp
)), 0,
6726 int_size_in_bytes (TREE_TYPE (exp
)));
6732 tree exp1
= TREE_OPERAND (exp
, 0);
6734 tree string
= string_constant (exp1
, &index
);
6736 /* Try to optimize reads from const strings. */
6738 && TREE_CODE (string
) == STRING_CST
6739 && TREE_CODE (index
) == INTEGER_CST
6740 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6741 && GET_MODE_CLASS (mode
) == MODE_INT
6742 && GET_MODE_SIZE (mode
) == 1
6743 && modifier
!= EXPAND_MEMORY_USE_WO
)
6745 GEN_INT (TREE_STRING_POINTER (string
)[TREE_INT_CST_LOW (index
)]);
6747 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6748 op0
= memory_address (mode
, op0
);
6750 if (cfun
&& current_function_check_memory_usage
6751 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
6753 enum memory_use_mode memory_usage
;
6754 memory_usage
= get_memory_usage_from_modifier (modifier
);
6756 if (memory_usage
!= MEMORY_USE_DONT
)
6758 in_check_memory_usage
= 1;
6759 emit_library_call (chkr_check_addr_libfunc
,
6760 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, op0
,
6761 Pmode
, GEN_INT (int_size_in_bytes (type
)),
6762 TYPE_MODE (sizetype
),
6763 GEN_INT (memory_usage
),
6764 TYPE_MODE (integer_type_node
));
6765 in_check_memory_usage
= 0;
6769 temp
= gen_rtx_MEM (mode
, op0
);
6770 set_mem_attributes (temp
, exp
, 0);
        /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
           here, because, in C and C++, the fact that a location is accessed
           through a pointer to const does not mean that the value there can
           never change.  Languages where it can never change should
           also set TREE_STATIC.  */
6777 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
) & TREE_STATIC (exp
);
6779 /* If we are writing to this object and its type is a record with
6780 readonly fields, we must mark it as readonly so it will
6781 conflict with readonly references to those fields. */
6782 if (modifier
== EXPAND_MEMORY_USE_WO
&& readonly_fields_p (type
))
6783 RTX_UNCHANGING_P (temp
) = 1;
6789 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6793 tree array
= TREE_OPERAND (exp
, 0);
6794 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6795 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6796 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
6807 if (! integer_zerop (low_bound
))
6808 index
= size_diffop (index
, convert (sizetype
, low_bound
));
        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */
6815 if (TREE_CODE (array
) == STRING_CST
6816 && TREE_CODE (index
) == INTEGER_CST
6817 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6818 && GET_MODE_CLASS (mode
) == MODE_INT
6819 && GET_MODE_SIZE (mode
) == 1)
6821 GEN_INT (TREE_STRING_POINTER (array
)[TREE_INT_CST_LOW (index
)]);
        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */
6828 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
6829 && TREE_CODE (index
) == INTEGER_CST
6830 && 0 > compare_tree_int (index
,
6831 list_length (CONSTRUCTOR_ELTS
6832 (TREE_OPERAND (exp
, 0)))))
6836 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6837 i
= TREE_INT_CST_LOW (index
);
6838 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6842 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6843 tmode
, ro_modifier
);
6846 else if (optimize
>= 1
6847 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6848 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6849 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
6851 if (TREE_CODE (index
) == INTEGER_CST
)
6853 tree init
= DECL_INITIAL (array
);
6855 if (TREE_CODE (init
) == CONSTRUCTOR
)
6859 for (elem
= CONSTRUCTOR_ELTS (init
);
6861 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6862 elem
= TREE_CHAIN (elem
))
6866 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6867 tmode
, ro_modifier
);
6869 else if (TREE_CODE (init
) == STRING_CST
6870 && 0 > compare_tree_int (index
,
6871 TREE_STRING_LENGTH (init
)))
6873 tree type
= TREE_TYPE (TREE_TYPE (init
));
6874 enum machine_mode mode
= TYPE_MODE (type
);
6876 if (GET_MODE_CLASS (mode
) == MODE_INT
6877 && GET_MODE_SIZE (mode
) == 1)
6879 (TREE_STRING_POINTER
6880 (init
)[TREE_INT_CST_LOW (index
)]));
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  Don't do this if we have
         already written the data since we want to refer to that copy
         and varasm.c assumes that's what we'll do.  */
6893 if (code
!= ARRAY_REF
6894 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
6895 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
6899 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6900 elt
= TREE_CHAIN (elt
))
6901 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
              /* We can normally use the value of the field in the
                 CONSTRUCTOR.  However, if this is a bitfield in
                 an integral mode that we can fit in a HOST_WIDE_INT,
                 we must mask only the number of bits in the bitfield,
                 since this is done implicitly by the constructor.  If
                 the bitfield does not meet either of those conditions,
                 we can't do this optimization.  */
6909 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6910 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6912 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6913 <= HOST_BITS_PER_WIDE_INT
))))
6915 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6916 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6918 HOST_WIDE_INT bitsize
6919 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6921 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6923 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6924 op0
= expand_and (op0
, op1
, target
);
6928 enum machine_mode imode
6929 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6931 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6934 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6936 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6946 enum machine_mode mode1
;
6947 HOST_WIDE_INT bitsize
, bitpos
;
6950 unsigned int alignment
;
6951 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6952 &mode1
, &unsignedp
, &volatilep
,
      /* If we got back the original object, something is wrong.  Perhaps
         we are evaluating an expression too early.  In any event, don't
         infinitely recurse.  */

      /* If TEM's type is a union of variable size, pass TARGET to the inner
         computation, since it will need a temporary and TARGET is known
         to have to do.  This occurs in unchecked conversion in Ada.  */
6965 op0
= expand_expr (tem
,
6966 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
6967 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
6969 ? target
: NULL_RTX
),
6971 (modifier
== EXPAND_INITIALIZER
6972 || modifier
== EXPAND_CONST_ADDRESS
)
6973 ? modifier
: EXPAND_NORMAL
);
6975 /* If this is a constant, put it into a register if it is a
6976 legitimate constant and OFFSET is 0 and memory if it isn't. */
6977 if (CONSTANT_P (op0
))
6979 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
6980 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
6982 op0
= force_reg (mode
, op0
);
6984 op0
= validize_mem (force_const_mem (mode
, op0
));
6989 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
          /* If this object is in memory, put it into a register.
             This case can't occur in C, but can in Ada if we have
             unchecked conversion of an expression from a scalar type to
             an array or record type.  */
6995 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6996 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
6998 tree nt
= build_qualified_type (TREE_TYPE (tem
),
6999 (TYPE_QUALS (TREE_TYPE (tem
))
7000 | TYPE_QUAL_CONST
));
7001 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7003 mark_temp_addr_taken (memloc
);
7004 emit_move_insn (memloc
, op0
);
7008 if (GET_CODE (op0
) != MEM
)
7011 if (GET_MODE (offset_rtx
) != ptr_mode
)
7013 #ifdef POINTERS_EXTEND_UNSIGNED
7014 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
7016 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
          /* A constant address in OP0 can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
7022 if (GET_CODE (op0
) == MEM
7023 && GET_MODE (op0
) == BLKmode
7024 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7026 && (bitpos
% bitsize
) == 0
7027 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7028 && alignment
== GET_MODE_ALIGNMENT (mode1
))
7030 rtx temp
= change_address (op0
, mode1
,
7031 plus_constant (XEXP (op0
, 0),
7034 if (GET_CODE (XEXP (temp
, 0)) == REG
)
7037 op0
= change_address (op0
, mode1
,
7038 force_reg (GET_MODE (XEXP (temp
, 0)),
7043 op0
= change_address (op0
, VOIDmode
,
7044 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
7045 force_reg (ptr_mode
,
7049 /* Don't forget about volatility even if this is a bitfield. */
7050 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7052 op0
= copy_rtx (op0
);
7053 MEM_VOLATILE_P (op0
) = 1;
7056 /* Check the access. */
7057 if (cfun
!= 0 && current_function_check_memory_usage
7058 && GET_CODE (op0
) == MEM
)
7060 enum memory_use_mode memory_usage
;
7061 memory_usage
= get_memory_usage_from_modifier (modifier
);
7063 if (memory_usage
!= MEMORY_USE_DONT
)
7068 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
7069 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
7071 /* Check the access right of the pointer. */
7072 in_check_memory_usage
= 1;
7073 if (size
> BITS_PER_UNIT
)
7074 emit_library_call (chkr_check_addr_libfunc
,
7075 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, to
,
7076 Pmode
, GEN_INT (size
/ BITS_PER_UNIT
),
7077 TYPE_MODE (sizetype
),
7078 GEN_INT (memory_usage
),
7079 TYPE_MODE (integer_type_node
));
7080 in_check_memory_usage
= 0;
      /* In cases where an aligned union has an unaligned object
         as a field, we might be extracting a BLKmode value from
         an integer-mode (e.g., SImode) object.  Handle this case
         by doing the extract into an object as wide as the field
         (which we know to be the width of a basic mode), then
         storing into memory, and changing the mode to BLKmode.
         If we ultimately want the address (EXPAND_CONST_ADDRESS or
         EXPAND_INITIALIZER), then we must not copy to a temporary.  */
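      /* As an illustrative sketch of that path (see the BLKmode handling
         further below): the field is fetched with extract_bit_field in an
         integer mode wide enough to hold it, copied into a temporary from
         assign_temp, and the temporary's mode is then switched to BLKmode,
         roughly

           new = assign_temp (nt, 0, 1, 1);
           emit_move_insn (new, op0);
           op0 = copy_rtx (new);
           PUT_MODE (op0, BLKmode);

         where NT is a qualified variant of the field's type.  This is only
         a summary of the code below, not a separate code path.  */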
7092 if (mode1
== VOIDmode
7093 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7094 || (modifier
!= EXPAND_CONST_ADDRESS
7095 && modifier
!= EXPAND_INITIALIZER
7096 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7097 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7098 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
7099 /* If the field isn't aligned enough to fetch as a memref,
7100 fetch it as a bit field. */
7101 || (mode1
!= BLKmode
7102 && SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
7103 && ((TYPE_ALIGN (TREE_TYPE (tem
))
7104 < GET_MODE_ALIGNMENT (mode
))
7105 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
7106 /* If the type and the field are a constant size and the
7107 size of the type isn't the same size as the bitfield,
7108 we must use bitfield operations. */
7110 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7112 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7114 || (modifier
!= EXPAND_CONST_ADDRESS
7115 && modifier
!= EXPAND_INITIALIZER
7117 && SLOW_UNALIGNED_ACCESS (mode
, alignment
)
7118 && (TYPE_ALIGN (type
) > alignment
7119 || bitpos
% TYPE_ALIGN (type
) != 0)))
7121 enum machine_mode ext_mode
= mode
;
7123 if (ext_mode
== BLKmode
7124 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7125 && GET_CODE (target
) == MEM
7126 && bitpos
% BITS_PER_UNIT
== 0))
7127 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7129 if (ext_mode
== BLKmode
)
7131 /* In this case, BITPOS must start at a byte boundary and
7132 TARGET, if specified, must be a MEM. */
7133 if (GET_CODE (op0
) != MEM
7134 || (target
!= 0 && GET_CODE (target
) != MEM
)
7135 || bitpos
% BITS_PER_UNIT
!= 0)
7138 op0
= change_address (op0
, VOIDmode
,
7139 plus_constant (XEXP (op0
, 0),
7140 bitpos
/ BITS_PER_UNIT
));
7142 target
= assign_temp (type
, 0, 1, 1);
7144 emit_block_move (target
, op0
,
7145 bitsize
== -1 ? expr_size (exp
)
7146 : GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7153 op0
= validize_mem (op0
);
7155 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7156 mark_reg_pointer (XEXP (op0
, 0), alignment
);
7158 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
7159 unsignedp
, target
, ext_mode
, ext_mode
,
7161 int_size_in_bytes (TREE_TYPE (tem
)));
          /* If the result is a record type and BITSIZE is narrower than
             the mode of OP0, an integral mode, and this is a big endian
             machine, we must put the field into the high-order bits.  */
7166 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7167 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7168 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
7169 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7170 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7174 if (mode
== BLKmode
)
7176 tree nt
= build_qualified_type (type_for_mode (ext_mode
, 0),
7178 rtx
new = assign_temp (nt
, 0, 1, 1);
7180 emit_move_insn (new, op0
);
7181 op0
= copy_rtx (new);
7182 PUT_MODE (op0
, BLKmode
);
7188 /* If the result is BLKmode, use that to access the object
7190 if (mode
== BLKmode
)
7193 /* Get a reference to just this component. */
7194 if (modifier
== EXPAND_CONST_ADDRESS
7195 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7197 rtx
new = gen_rtx_MEM (mode1
,
7198 plus_constant (XEXP (op0
, 0),
7199 (bitpos
/ BITS_PER_UNIT
)));
7201 MEM_COPY_ATTRIBUTES (new, op0
);
7205 op0
= change_address (op0
, mode1
,
7206 plus_constant (XEXP (op0
, 0),
7207 (bitpos
/ BITS_PER_UNIT
)));
7209 set_mem_attributes (op0
, exp
, 0);
7210 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7211 mark_reg_pointer (XEXP (op0
, 0), alignment
);
7213 MEM_VOLATILE_P (op0
) |= volatilep
;
7214 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7215 || modifier
== EXPAND_CONST_ADDRESS
7216 || modifier
== EXPAND_INITIALIZER
)
7218 else if (target
== 0)
7219 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7221 convert_move (target
, op0
, unsignedp
);
      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */

        /* Pascal set IN expression.

             rlo       = set_low - (set_low % bits_per_word);
             the_word  = set [ (index - rlo) / bits_per_word ];
             bit_index = index % bits_per_word;
             bitmask   = 1 << bit_index;
             return !!(the_word & bitmask);  */
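        /* A worked example (purely illustrative), with bits_per_word == 8,
           set_low == 3 and index == 13:

             rlo       = 3 - (3 % 8)       = 0
             the_word  = set[(13 - 0) / 8] = set[1]
             bit_index = 13 % 8            = 5
             bitmask   = 1 << 5            = 0x20

           so the IN expression is true iff bit 5 of the second byte of
           the set is nonzero.  */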
7242 tree set
= TREE_OPERAND (exp
, 0);
7243 tree index
= TREE_OPERAND (exp
, 1);
7244 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7245 tree set_type
= TREE_TYPE (set
);
7246 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7247 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7248 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7249 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7250 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7251 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7252 rtx setaddr
= XEXP (setval
, 0);
7253 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7255 rtx diff
, quo
, rem
, addr
, bit
, result
;
7257 /* If domain is empty, answer is no. Likewise if index is constant
7258 and out of bounds. */
7259 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7260 && TREE_CODE (set_low_bound
) == INTEGER_CST
7261 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7262 || (TREE_CODE (index
) == INTEGER_CST
7263 && TREE_CODE (set_low_bound
) == INTEGER_CST
7264 && tree_int_cst_lt (index
, set_low_bound
))
7265 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7266 && TREE_CODE (index
) == INTEGER_CST
7267 && tree_int_cst_lt (set_high_bound
, index
))))
7271 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7273 /* If we get here, we have to generate the code for both cases
7274 (in range and out of range). */
7276 op0
= gen_label_rtx ();
7277 op1
= gen_label_rtx ();
7279 if (! (GET_CODE (index_val
) == CONST_INT
7280 && GET_CODE (lo_r
) == CONST_INT
))
7282 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7283 GET_MODE (index_val
), iunsignedp
, 0, op1
);
7286 if (! (GET_CODE (index_val
) == CONST_INT
7287 && GET_CODE (hi_r
) == CONST_INT
))
7289 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7290 GET_MODE (index_val
), iunsignedp
, 0, op1
);
7293 /* Calculate the element number of bit zero in the first word
7295 if (GET_CODE (lo_r
) == CONST_INT
)
7296 rlow
= GEN_INT (INTVAL (lo_r
)
7297 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7299 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7300 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7301 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7303 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7304 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7306 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7307 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7308 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7309 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7311 addr
= memory_address (byte_mode
,
7312 expand_binop (index_mode
, add_optab
, diff
,
7313 setaddr
, NULL_RTX
, iunsignedp
,
7316 /* Extract the bit we want to examine. */
7317 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7318 gen_rtx_MEM (byte_mode
, addr
),
7319 make_tree (TREE_TYPE (index
), rem
),
7321 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7322 GET_MODE (target
) == byte_mode
? target
: 0,
7323 1, OPTAB_LIB_WIDEN
);
7325 if (result
!= target
)
7326 convert_move (target
, result
, 1);
7328 /* Output the code to handle the out-of-range case. */
7331 emit_move_insn (target
, const0_rtx
);
7336 case WITH_CLEANUP_EXPR
:
7337 if (RTL_EXPR_RTL (exp
) == 0)
7340 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
7341 expand_decl_cleanup (NULL_TREE
, TREE_OPERAND (exp
, 2));
7343 /* That's it for this cleanup. */
7344 TREE_OPERAND (exp
, 2) = 0;
7346 return RTL_EXPR_RTL (exp
);
7348 case CLEANUP_POINT_EXPR
:
7350 /* Start a new binding layer that will keep track of all cleanup
7351 actions to be performed. */
7352 expand_start_bindings (2);
7354 target_temp_slot_level
= temp_slot_level
;
7356 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
7357 /* If we're going to use this value, load it up now. */
7359 op0
= force_not_mem (op0
);
7360 preserve_temp_slots (op0
);
7361 expand_end_bindings (NULL_TREE
, 0, 0);
7366 /* Check for a built-in function. */
7367 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7368 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7370 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7372 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7373 == BUILT_IN_FRONTEND
)
7374 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
7376 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7379 return expand_call (exp
, target
, ignore
);
7381 case NON_LVALUE_EXPR
:
7384 case REFERENCE_EXPR
:
7385 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7388 if (TREE_CODE (type
) == UNION_TYPE
)
7390 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7392 /* If both input and output are BLKmode, this conversion
7393 isn't actually doing anything unless we need to make the
7394 alignment stricter. */
7395 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
7396 && (TYPE_ALIGN (type
) <= TYPE_ALIGN (valtype
)
7397 || TYPE_ALIGN (type
) >= BIGGEST_ALIGNMENT
))
7398 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7402 target
= assign_temp (type
, 0, 1, 1);
7404 if (GET_CODE (target
) == MEM
)
7405 /* Store data into beginning of memory target. */
7406 store_expr (TREE_OPERAND (exp
, 0),
7407 change_address (target
, TYPE_MODE (valtype
), 0), 0);
7409 else if (GET_CODE (target
) == REG
)
7410 /* Store this field into a union of the proper type. */
7411 store_field (target
,
7412 MIN ((int_size_in_bytes (TREE_TYPE
7413 (TREE_OPERAND (exp
, 0)))
7415 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7416 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7417 VOIDmode
, 0, BITS_PER_UNIT
,
7418 int_size_in_bytes (type
), 0);
7422 /* Return the entire union. */
7426 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7428 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7431 /* If the signedness of the conversion differs and OP0 is
7432 a promoted SUBREG, clear that indication since we now
7433 have to do the proper extension. */
7434 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7435 && GET_CODE (op0
) == SUBREG
)
7436 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7441 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
7442 if (GET_MODE (op0
) == mode
)
7445 /* If OP0 is a constant, just convert it into the proper mode. */
7446 if (CONSTANT_P (op0
))
7448 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7449 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7451 if (modifier
== EXPAND_INITIALIZER
)
7452 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7456 convert_to_mode (mode
, op0
,
7457 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7459 convert_move (target
, op0
,
7460 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? addv_optab : add_optab;
      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */
7482 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7483 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7484 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7485 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7486 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7487 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7489 tree t
= TREE_OPERAND (exp
, 1);
7491 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7492 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
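      /* For instance, expanding P = &ARR[5] with 4-byte array elements
         should ideally yield the constant sum

           (plus (symbol_ref "ARR") (const_int 20))

         instead of an explicit add insn; when the target cannot use such
         an address directly and we are not returning a sum, force_operand
         is used below to load it into a register.  (Illustrative sketch;
         modes and offsets depend on the target.)  */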
7503 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7504 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7506 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7507 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7508 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7512 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
          /* Use immed_double_const to ensure that the constant is
             truncated according to the mode of OP1, then sign extended
             to a HOST_WIDE_INT.  Using the constant directly can result
             in non-canonical RTL in a 64x32 cross compile.  */
7519 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7521 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7522 op1
= plus_constant (op1
, INTVAL (constant_part
));
7523 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7524 op1
= force_operand (op1
, target
);
7528 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7529 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7530 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7534 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7536 if (! CONSTANT_P (op0
))
7538 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7539 VOIDmode
, modifier
);
7540 /* Don't go to both_summands if modifier
7541 says it's not right to return a PLUS. */
7542 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7546 /* Use immed_double_const to ensure that the constant is
7547 truncated according to the mode of OP1, then sign extended
7548 to a HOST_WIDE_INT. Using the constant directly can result
7549 in non-canonical RTL in a 64x32 cross compile. */
7551 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7553 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7554 op0
= plus_constant (op0
, INTVAL (constant_part
));
7555 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7556 op0
= force_operand (op0
, target
);
      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
7565 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7566 || mode
!= ptr_mode
)
7569 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7572 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, ro_modifier
);
7573 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, ro_modifier
);
7576 /* Make sure any term that's a sum with a constant comes last. */
7577 if (GET_CODE (op0
) == PLUS
7578 && CONSTANT_P (XEXP (op0
, 1)))
7584 /* If adding to a sum including a constant,
7585 associate it to put the constant outside. */
7586 if (GET_CODE (op1
) == PLUS
7587 && CONSTANT_P (XEXP (op1
, 1)))
7589 rtx constant_term
= const0_rtx
;
7591 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7594 /* Ensure that MULT comes first if there is one. */
7595 else if (GET_CODE (op0
) == MULT
)
7596 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7598 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7600 /* Let's also eliminate constants from op0 if possible. */
7601 op0
= eliminate_constant_term (op0
, &constant_term
);
7603 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7604 their sum should be a constant. Form it into OP1, since the
7605 result we want will then be OP0 + OP1. */
7607 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7612 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7615 /* Put a constant term last and put a multiplication first. */
7616 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7617 temp
= op1
, op1
= op0
, op0
= temp
;
7619 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7620 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
7628 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7629 && really_constant_p (TREE_OPERAND (exp
, 0))
7630 && really_constant_p (TREE_OPERAND (exp
, 1)))
7632 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
7633 VOIDmode
, ro_modifier
);
7634 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7635 VOIDmode
, ro_modifier
);
7637 /* If the last operand is a CONST_INT, use plus_constant of
7638 the negated constant. Else make the MINUS. */
7639 if (GET_CODE (op1
) == CONST_INT
)
7640 return plus_constant (op0
, - INTVAL (op1
));
7642 return gen_rtx_MINUS (mode
, op0
, op1
);
7644 /* Convert A - const to A + (-const). */
7645 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7647 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
7648 TREE_OPERAND (exp
, 1)));
7650 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
            /* If we can't negate the constant in TYPE, leave it alone and
               expand_binop will negate it for us.  We used to try to do it
               here in the signed version of TYPE, but that doesn't work
               on POINTER_TYPEs.  */;
7657 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
7661 this_optab
= ! unsignedp
&& flag_trapv
7662 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7663 ? subv_optab
: sub_optab
;
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
7670 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7672 register tree t1
= TREE_OPERAND (exp
, 0);
7673 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7674 TREE_OPERAND (exp
, 1) = t1
;
7677 /* Attempt to return something suitable for generating an
7678 indexed address, for machines that support that. */
7680 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7681 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7682 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
7684 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7687 /* Apply distributive law if OP0 is x+c. */
7688 if (GET_CODE (op0
) == PLUS
7689 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
7694 (mode
, XEXP (op0
, 0),
7695 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
7696 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
7697 * INTVAL (XEXP (op0
, 1))));
7699 if (GET_CODE (op0
) != REG
)
7700 op0
= force_operand (op0
, NULL_RTX
);
7701 if (GET_CODE (op0
) != REG
)
7702 op0
= copy_to_mode_reg (mode
, op0
);
7705 gen_rtx_MULT (mode
, op0
,
7706 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
7709 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
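      /* A typical instance (illustrative only): for source like

           short a, b;
           int c = (int) a * (int) b;

         a machine with a 16x16->32 widening multiply pattern can use
         smul_widen_optab on the narrow (e.g. HImode) operands directly,
         instead of sign-extending both operands and doing a full-width
         multiply.  The mode names here are examples; the actual modes
         depend on the target.  */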
7716 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7717 && TREE_CODE (type
) == INTEGER_TYPE
7718 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7719 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7720 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7721 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7722 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7723 /* Don't use a widening multiply if a shift will do. */
7724 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7725 > HOST_BITS_PER_WIDE_INT
)
7726 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7728 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7729 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7731 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7732 /* If both operands are extended, they must either both
7733 be zero-extended or both be sign-extended. */
7734 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7736 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7738 enum machine_mode innermode
7739 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7740 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7741 ? smul_widen_optab
: umul_widen_optab
);
7742 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7743 ? umul_widen_optab
: smul_widen_optab
);
7744 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7746 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7748 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7749 NULL_RTX
, VOIDmode
, 0);
7750 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7751 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7754 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7755 NULL_RTX
, VOIDmode
, 0);
7758 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7759 && innermode
== word_mode
)
7762 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7763 NULL_RTX
, VOIDmode
, 0);
7764 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7765 op1
= convert_modes (innermode
, mode
,
7766 expand_expr (TREE_OPERAND (exp
, 1),
7767 NULL_RTX
, VOIDmode
, 0),
7770 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7771 NULL_RTX
, VOIDmode
, 0);
7772 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7773 unsignedp
, OPTAB_LIB_WIDEN
);
7774 htem
= expand_mult_highpart_adjust (innermode
,
7775 gen_highpart (innermode
, temp
),
7777 gen_highpart (innermode
, temp
),
7779 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7784 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7785 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7786 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7788 case TRUNC_DIV_EXPR
:
7789 case FLOOR_DIV_EXPR
:
7791 case ROUND_DIV_EXPR
:
7792 case EXACT_DIV_EXPR
:
7793 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
7798 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7799 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7800 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7803 this_optab
= flodiv_optab
;
7806 case TRUNC_MOD_EXPR
:
7807 case FLOOR_MOD_EXPR
:
7809 case ROUND_MOD_EXPR
:
7810 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7812 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7813 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7814 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7816 case FIX_ROUND_EXPR
:
7817 case FIX_FLOOR_EXPR
:
7819 abort (); /* Not used for C. */
7821 case FIX_TRUNC_EXPR
:
7822 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7824 target
= gen_reg_rtx (mode
);
7825 expand_fix (target
, op0
, unsignedp
);
7829 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7831 target
= gen_reg_rtx (mode
);
7832 /* expand_float can't figure out what to do if FROM has VOIDmode.
7833 So give it the correct mode. With -O, cse will optimize this. */
7834 if (GET_MODE (op0
) == VOIDmode
)
7835 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7837 expand_float (target
, op0
,
7838 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7842 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7843 temp
= expand_unop (mode
,
7844 ! unsignedp
&& flag_trapv
7845 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7846 ? negv_optab
: neg_optab
, op0
, target
, 0);
7852 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7854 /* Handle complex values specially. */
7855 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
7856 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
7857 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
7859 /* Unsigned abs is simply the operand. Testing here means we don't
7860 risk generating incorrect code below. */
7861 if (TREE_UNSIGNED (type
))
7864 return expand_abs (mode
, op0
, target
, unsignedp
,
7865 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
7869 target
= original_target
;
7870 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
7871 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
7872 || GET_MODE (target
) != mode
7873 || (GET_CODE (target
) == REG
7874 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
7875 target
= gen_reg_rtx (mode
);
7876 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7877 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));
7886 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
7891 /* At this point, a MEM target is no longer useful; we will get better
7894 if (GET_CODE (target
) == MEM
)
7895 target
= gen_reg_rtx (mode
);
7898 emit_move_insn (target
, op0
);
7900 op0
= gen_label_rtx ();
      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
7904 if (GET_MODE_CLASS (mode
) == MODE_INT
7905 && ! can_compare_p (GE
, mode
, ccp_jump
))
7907 if (code
== MAX_EXPR
)
7908 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7909 target
, op1
, NULL_RTX
, op0
);
7911 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7912 op1
, target
, NULL_RTX
, op0
);
7916 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7917 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
7918 unsignedp
, mode
, NULL_RTX
, 0, NULL_RTX
,
7921 emit_move_insn (target
, op1
);
7926 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7927 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
7933 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7934 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */
      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */
7952 case TRUTH_AND_EXPR
:
7954 this_optab
= and_optab
;
7959 this_optab
= ior_optab
;
7962 case TRUTH_XOR_EXPR
:
7964 this_optab
= xor_optab
;
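      /* Clarifying note (added): for `a && b' the front end normally builds
	 TRUTH_ANDIF_EXPR, which branches around the evaluation of B.  A
	 TRUTH_AND_EXPR, e.g. one built for `(a != 0) & (b != 0)', evaluates
	 both operands to 0 or 1 and combines them with and_optab exactly
	 like a bitwise AND, which is only correct to prefer when both
	 operands are free of side effects.  */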
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, 0, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */
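      /* Illustration (added): when ORIGINAL_TARGET is a pseudo of the right
	 mode and no store-flag insn exists, `r = (x != 0)' is expanded by
	 the code above as

	     r = x;
	     if (r == 0) goto L;
	     r = 1;
	   L:

	 i.e. an emit_cmp_and_jump_insns around a move of const1_rtx.  */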
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;
    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, ro_modifier);
8061 /* If we would have a "singleton" (see below) were it not for a
8062 conversion in each arm, bring that conversion back out. */
8063 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8064 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8065 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8066 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8068 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8069 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8071 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8072 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8073 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8074 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8075 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8076 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8077 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8078 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8079 return expand_expr (build1 (NOP_EXPR
, type
,
8080 build (COND_EXPR
, TREE_TYPE (iftrue
),
8081 TREE_OPERAND (exp
, 0),
8083 target
, tmode
, modifier
);
8087 /* Note that COND_EXPRs whose type is a structure or union
8088 are required to be constructed to contain assignments of
8089 a temporary variable, so that we can evaluate them here
8090 for side effect only. If type is void, we must do likewise. */
8092 /* If an arm of the branch requires a cleanup,
8093 only that cleanup is performed. */
8096 tree binary_op
= 0, unary_op
= 0;
8098 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8099 convert it to our mode, if necessary. */
8100 if (integer_onep (TREE_OPERAND (exp
, 1))
8101 && integer_zerop (TREE_OPERAND (exp
, 2))
8102 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8106 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8111 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, ro_modifier
);
8112 if (GET_MODE (op0
) == mode
)
8116 target
= gen_reg_rtx (mode
);
8117 convert_move (target
, op0
, unsignedp
);
8121 /* Check for X ? A + B : A. If we have this, we can copy A to the
8122 output and conditionally add B. Similarly for unary operations.
8123 Don't do this if X has side-effects because those side effects
8124 might affect A or B and the "?" operation is a sequence point in
8125 ANSI. (operand_equal_p tests for side effects.) */
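	/* Example (added): in `v = x ? v + e : v' the else arm V equals the
	   first operand of the then arm, so SINGLETON becomes V and
	   BINARY_OP becomes V + E; the expansion can then store V
	   unconditionally and add E only on the taken branch instead of
	   evaluating two complete arms.  */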
8127 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8128 && operand_equal_p (TREE_OPERAND (exp
, 2),
8129 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8130 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8131 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8132 && operand_equal_p (TREE_OPERAND (exp
, 1),
8133 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8134 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8135 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8136 && operand_equal_p (TREE_OPERAND (exp
, 2),
8137 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8138 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8139 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8140 && operand_equal_p (TREE_OPERAND (exp
, 1),
8141 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8142 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8144 /* If we are not to produce a result, we have no target. Otherwise,
8145 if a target was specified use it; it will not be used as an
8146 intermediate target unless it is safe. If no target, use a
8151 else if (original_target
8152 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8153 || (singleton
&& GET_CODE (original_target
) == REG
8154 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8155 && original_target
== var_rtx (singleton
)))
8156 && GET_MODE (original_target
) == mode
8157 #ifdef HAVE_conditional_move
8158 && (! can_conditionally_move_p (mode
)
8159 || GET_CODE (original_target
) == REG
8160 || TREE_ADDRESSABLE (type
))
8162 && ! (GET_CODE (original_target
) == MEM
8163 && MEM_VOLATILE_P (original_target
)))
8164 temp
= original_target
;
8165 else if (TREE_ADDRESSABLE (type
))
8168 temp
= assign_temp (type
, 0, 0, 1);
8170 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8171 do the test of X as a store-flag operation, do this as
8172 A + ((X != 0) << log C). Similarly for other simple binary
8173 operators. Only do for C == 1 if BRANCH_COST is low. */
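	/* Worked example (added): for `y = (a > b) ? x + 8 : x', C is 8 and
	   log2 (C) is 3, so the code below computes x + ((a > b) << 3) using
	   do_store_flag plus a shift, avoiding a branch entirely.  */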
8174 if (temp
&& singleton
&& binary_op
8175 && (TREE_CODE (binary_op
) == PLUS_EXPR
8176 || TREE_CODE (binary_op
) == MINUS_EXPR
8177 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8178 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8179 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8180 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8181 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8184 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8185 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8186 ? addv_optab
: add_optab
)
8187 : TREE_CODE (binary_op
) == MINUS_EXPR
8188 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8189 ? subv_optab
: sub_optab
)
8190 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8193 /* If we had X ? A : A + 1, do this as A + (X == 0).
8195 We have to invert the truth value here and then put it
8196 back later if do_store_flag fails. We cannot simply copy
8197 TREE_OPERAND (exp, 0) to another variable and modify that
8198 because invert_truthvalue can modify the tree pointed to
8200 if (singleton
== TREE_OPERAND (exp
, 1))
8201 TREE_OPERAND (exp
, 0)
8202 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8204 result
= do_store_flag (TREE_OPERAND (exp
, 0),
8205 (safe_from_p (temp
, singleton
, 1)
8207 mode
, BRANCH_COST
<= 1);
8209 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8210 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8211 build_int_2 (tree_log2
8215 (safe_from_p (temp
, singleton
, 1)
8216 ? temp
: NULL_RTX
), 0);
8220 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8221 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8222 unsignedp
, OPTAB_LIB_WIDEN
);
8224 else if (singleton
== TREE_OPERAND (exp
, 1))
8225 TREE_OPERAND (exp
, 0)
8226 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8229 do_pending_stack_adjust ();
8231 op0
= gen_label_rtx ();
8233 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8237 /* If the target conflicts with the other operand of the
8238 binary op, we can't use it. Also, we can't use the target
8239 if it is a hard register, because evaluating the condition
8240 might clobber it. */
8242 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8243 || (GET_CODE (temp
) == REG
8244 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8245 temp
= gen_reg_rtx (mode
);
8246 store_expr (singleton
, temp
, 0);
8249 expand_expr (singleton
,
8250 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8251 if (singleton
== TREE_OPERAND (exp
, 1))
8252 jumpif (TREE_OPERAND (exp
, 0), op0
);
8254 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8256 start_cleanup_deferral ();
8257 if (binary_op
&& temp
== 0)
8258 /* Just touch the other operand. */
8259 expand_expr (TREE_OPERAND (binary_op
, 1),
8260 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8262 store_expr (build (TREE_CODE (binary_op
), type
,
8263 make_tree (type
, temp
),
8264 TREE_OPERAND (binary_op
, 1)),
8267 store_expr (build1 (TREE_CODE (unary_op
), type
,
8268 make_tree (type
, temp
)),
8272 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8273 comparison operator. If we have one of these cases, set the
8274 output to A, branch on A (cse will merge these two references),
8275 then set the output to FOO. */
8277 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8278 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8279 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8280 TREE_OPERAND (exp
, 1), 0)
8281 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8282 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8283 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8285 if (GET_CODE (temp
) == REG
8286 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8287 temp
= gen_reg_rtx (mode
);
8288 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8289 jumpif (TREE_OPERAND (exp
, 0), op0
);
8291 start_cleanup_deferral ();
8292 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8296 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8297 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8298 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8299 TREE_OPERAND (exp
, 2), 0)
8300 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8301 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8302 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8304 if (GET_CODE (temp
) == REG
8305 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8306 temp
= gen_reg_rtx (mode
);
8307 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8308 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8310 start_cleanup_deferral ();
8311 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8316 op1
= gen_label_rtx ();
8317 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8319 start_cleanup_deferral ();
8321 /* One branch of the cond can be void, if it never returns. For
8322 example A ? throw : E */
8324 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8325 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8327 expand_expr (TREE_OPERAND (exp
, 1),
8328 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8329 end_cleanup_deferral ();
8331 emit_jump_insn (gen_jump (op1
));
8334 start_cleanup_deferral ();
8336 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8337 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8339 expand_expr (TREE_OPERAND (exp
, 2),
8340 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8343 end_cleanup_deferral ();
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which is passed on the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */
8363 tree slot
= TREE_OPERAND (exp
, 0);
8364 tree cleanups
= NULL_TREE
;
8367 if (TREE_CODE (slot
) != VAR_DECL
)
8371 target
= original_target
;
8373 /* Set this here so that if we get a target that refers to a
8374 register variable that's already been used, put_reg_into_stack
8375 knows that it should fix up those uses. */
8376 TREE_USED (slot
) = 1;
8380 if (DECL_RTL_SET_P (slot
))
8382 target
= DECL_RTL (slot
);
	    /* If we have already expanded the slot, don't do it again.  */
8385 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8390 target
= assign_temp (type
, 2, 0, 1);
8391 /* All temp slots at this level must not conflict. */
8392 preserve_temp_slots (target
);
8393 SET_DECL_RTL (slot
, target
);
8394 if (TREE_ADDRESSABLE (slot
))
8395 put_var_into_stack (slot
);
8397 /* Since SLOT is not known to the called function
8398 to belong to its stack frame, we must build an explicit
8399 cleanup. This case occurs when we must build up a reference
8400 to pass the reference as an argument. In this case,
8401 it is very likely that such a reference need not be
8404 if (TREE_OPERAND (exp
, 2) == 0)
8405 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
8406 cleanups
= TREE_OPERAND (exp
, 2);
8411 /* This case does occur, when expanding a parameter which
8412 needs to be constructed on the stack. The target
8413 is the actual stack address that we want to initialize.
8414 The function we call will perform the cleanup in this case. */
8416 /* If we have already assigned it space, use that space,
8417 not target that we were passed in, as our target
8418 parameter is only a hint. */
8419 if (DECL_RTL_SET_P (slot
))
8421 target
= DECL_RTL (slot
);
	    /* If we have already expanded the slot, don't do it again.  */
8424 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8429 SET_DECL_RTL (slot
, target
);
8430 /* If we must have an addressable slot, then make sure that
8431 the RTL that we just stored in slot is OK. */
8432 if (TREE_ADDRESSABLE (slot
))
8433 put_var_into_stack (slot
);
8437 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8438 /* Mark it as expanded. */
8439 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8441 store_expr (exp1
, target
, 0);
8443 expand_decl_cleanup (NULL_TREE
, cleanups
);
8450 tree lhs
= TREE_OPERAND (exp
, 0);
8451 tree rhs
= TREE_OPERAND (exp
, 1);
8452 tree noncopied_parts
= 0;
8453 tree lhs_type
= TREE_TYPE (lhs
);
8455 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8456 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
8457 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
8458 TYPE_NONCOPIED_PARTS (lhs_type
));
8459 while (noncopied_parts
!= 0)
8461 expand_assignment (TREE_VALUE (noncopied_parts
),
8462 TREE_PURPOSE (noncopied_parts
), 0, 0);
8463 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8470 /* If lhs is complex, expand calls in rhs before computing it.
8471 That's so we don't compute a pointer and save it over a call.
8472 If lhs is simple, compute it first so we can give it as a
8473 target if the rhs is just a call. This avoids an extra temp and copy
8474 and that prevents a partial-subsumption which makes bad code.
8475 Actually we could treat component_ref's of vars like vars. */
8477 tree lhs
= TREE_OPERAND (exp
, 0);
8478 tree rhs
= TREE_OPERAND (exp
, 1);
8479 tree noncopied_parts
= 0;
8480 tree lhs_type
= TREE_TYPE (lhs
);
	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
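	/* Illustration (added): assuming 1-bit fields, `s.f |= t.g' becomes
	   roughly `if (t.g) s.f = 1;', i.e. a do_jump around a constant
	   store instead of a read-modify-write of the destination
	   bitfield.  */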
8493 && TREE_CODE (lhs
) == COMPONENT_REF
8494 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8495 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8496 && TREE_OPERAND (rhs
, 0) == lhs
8497 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8498 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8499 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8501 rtx label
= gen_label_rtx ();
8503 do_jump (TREE_OPERAND (rhs
, 1),
8504 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8505 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8506 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8507 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8509 : integer_zero_node
)),
8511 do_pending_stack_adjust ();
8516 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
8517 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
8518 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
8519 TYPE_NONCOPIED_PARTS (lhs_type
));
8521 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8522 while (noncopied_parts
!= 0)
8524 expand_assignment (TREE_PURPOSE (noncopied_parts
),
8525 TREE_VALUE (noncopied_parts
), 0, 0);
8526 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8532 if (!TREE_OPERAND (exp
, 0))
8533 expand_null_return ();
8535 expand_return (TREE_OPERAND (exp
, 0));
8538 case PREINCREMENT_EXPR
:
8539 case PREDECREMENT_EXPR
:
8540 return expand_increment (exp
, 0, ignore
);
8542 case POSTINCREMENT_EXPR
:
8543 case POSTDECREMENT_EXPR
:
8544 /* Faster to treat as pre-increment if result is not used. */
8545 return expand_increment (exp
, ! ignore
, ignore
);
8548 /* If nonzero, TEMP will be set to the address of something that might
8549 be a MEM corresponding to a stack slot. */
8552 /* Are we taking the address of a nested function? */
8553 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8554 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8555 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8556 && ! TREE_STATIC (exp
))
8558 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8559 op0
= force_operand (op0
, target
);
8561 /* If we are taking the address of something erroneous, just
8563 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
      /* We make sure to pass const0_rtx down if we came in with
	 ignore set, to avoid doing the cleanups twice.  */
8569 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8570 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8571 (modifier
== EXPAND_INITIALIZER
8572 ? modifier
: EXPAND_CONST_ADDRESS
));
8574 /* If we are going to ignore the result, OP0 will have been set
8575 to const0_rtx, so just return it. Don't get confused and
8576 think we are taking the address of the constant. */
8580 op0
= protect_from_queue (op0
, 0);
8582 /* We would like the object in memory. If it is a constant, we can
8583 have it be statically allocated into memory. For a non-constant,
8584 we need to allocate some memory and store the value into it. */
8586 if (CONSTANT_P (op0
))
8587 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8589 else if (GET_CODE (op0
) == MEM
)
8591 mark_temp_addr_taken (op0
);
8592 temp
= XEXP (op0
, 0);
8595 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8596 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8597 || GET_CODE (op0
) == PARALLEL
)
	  /* This object is in a register (or a SUBREG, CONCAT, ADDRESSOF or
	     PARALLEL); to take its address we must first copy it into a
	     memory temporary.  */
8601 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8602 tree nt
= build_qualified_type (inner_type
,
8603 (TYPE_QUALS (inner_type
)
8604 | TYPE_QUAL_CONST
));
8605 rtx memloc
= assign_temp (nt
, 1, 1, 1);
8607 mark_temp_addr_taken (memloc
);
8608 if (GET_CODE (op0
) == PARALLEL
)
8609 /* Handle calls that pass values in multiple non-contiguous
8610 locations. The Irix 6 ABI has examples of this. */
8611 emit_group_store (memloc
, op0
,
8612 int_size_in_bytes (inner_type
),
8613 TYPE_ALIGN (inner_type
));
8615 emit_move_insn (memloc
, op0
);
8619 if (GET_CODE (op0
) != MEM
)
8622 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8624 temp
= XEXP (op0
, 0);
8625 #ifdef POINTERS_EXTEND_UNSIGNED
8626 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
8627 && mode
== ptr_mode
)
8628 temp
= convert_memory_address (ptr_mode
, temp
);
8633 op0
= force_operand (XEXP (op0
, 0), target
);
8636 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
8637 op0
= force_reg (Pmode
, op0
);
8639 if (GET_CODE (op0
) == REG
8640 && ! REG_USERVAR_P (op0
))
8641 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8643 /* If we might have had a temp slot, add an equivalent address
8646 update_temp_slot_address (temp
, op0
);
8648 #ifdef POINTERS_EXTEND_UNSIGNED
8649 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8650 && mode
== ptr_mode
)
8651 op0
= convert_memory_address (ptr_mode
, op0
);
8656 case ENTRY_VALUE_EXPR
:
8659 /* COMPLEX type for Extended Pascal & Fortran */
8662 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8665 /* Get the rtx code of the operands. */
8666 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8667 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8670 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8674 /* Move the real (op0) and imaginary (op1) parts to their location. */
8675 emit_move_insn (gen_realpart (mode
, target
), op0
);
8676 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8678 insns
= get_insns ();
8681 /* Complex construction should appear as a single unit. */
8682 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8683 each with a separate pseudo as destination.
8684 It's not correct for flow to treat them as a unit. */
8685 if (GET_CODE (target
) != CONCAT
)
8686 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8694 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8695 return gen_realpart (mode
, op0
);
8698 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8699 return gen_imagpart (mode
, op0
);
8703 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8707 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8710 target
= gen_reg_rtx (mode
);
8714 /* Store the realpart and the negated imagpart to target. */
8715 emit_move_insn (gen_realpart (partmode
, target
),
8716 gen_realpart (partmode
, op0
));
8718 imag_t
= gen_imagpart (partmode
, target
);
8719 temp
= expand_unop (partmode
,
8720 ! unsignedp
&& flag_trapv
8721 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8722 ? negv_optab
: neg_optab
,
8723 gen_imagpart (partmode
, op0
), imag_t
, 0);
8725 emit_move_insn (imag_t
, temp
);
8727 insns
= get_insns ();
8730 /* Conjugate should appear as a single unit
8731 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8732 each with a separate pseudo as destination.
8733 It's not correct for flow to treat them as a unit. */
8734 if (GET_CODE (target
) != CONCAT
)
8735 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8742 case TRY_CATCH_EXPR
:
8744 tree handler
= TREE_OPERAND (exp
, 1);
8746 expand_eh_region_start ();
8748 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8750 expand_eh_region_end_cleanup (handler
);
8755 case TRY_FINALLY_EXPR
:
8757 tree try_block
= TREE_OPERAND (exp
, 0);
8758 tree finally_block
= TREE_OPERAND (exp
, 1);
8759 rtx finally_label
= gen_label_rtx ();
8760 rtx done_label
= gen_label_rtx ();
8761 rtx return_link
= gen_reg_rtx (Pmode
);
8762 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8763 (tree
) finally_label
, (tree
) return_link
);
8764 TREE_SIDE_EFFECTS (cleanup
) = 1;
8766 /* Start a new binding layer that will keep track of all cleanup
8767 actions to be performed. */
8768 expand_start_bindings (2);
8770 target_temp_slot_level
= temp_slot_level
;
8772 expand_decl_cleanup (NULL_TREE
, cleanup
);
8773 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8775 preserve_temp_slots (op0
);
8776 expand_end_bindings (NULL_TREE
, 0, 0);
8777 emit_jump (done_label
);
8778 emit_label (finally_label
);
8779 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8780 emit_indirect_jump (return_link
);
8781 emit_label (done_label
);
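      /* Sketch of the emitted layout (added; the label names are the ones
	 created above):

	     <try_block>
	     goto done_label;
	   finally_label:
	     <finally_block>
	     goto *return_link;
	   done_label:

	 Cleanups reach finally_label through the GOTO_SUBROUTINE_EXPR built
	 above, which sets RETURN_LINK before jumping.  */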
8785 case GOTO_SUBROUTINE_EXPR
:
8787 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8788 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8789 rtx return_address
= gen_label_rtx ();
8790 emit_move_insn (return_link
,
8791 gen_rtx_LABEL_REF (Pmode
, return_address
));
8793 emit_label (return_address
);
8798 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8801 return get_exception_pointer ();
8804 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
8807 /* Here to do an ordinary binary operator, generating an instruction
8808 from the optab already placed in `this_optab'. */
8810 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8812 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8813 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8815 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
8816 unsignedp
, OPTAB_LIB_WIDEN
);
8822 /* Similar to expand_expr, except that we don't specify a target, target
8823 mode, or modifier and we return the alignment of the inner type. This is
8824 used in cases where it is not necessary to align the result to the
8825 alignment of its type as long as we know the alignment of the result, for
8826 example for comparisons of BLKmode values. */
8829 expand_expr_unaligned (exp
, palign
)
8831 unsigned int *palign
;
8834 tree type
= TREE_TYPE (exp
);
8835 register enum machine_mode mode
= TYPE_MODE (type
);
8837 /* Default the alignment we return to that of the type. */
8838 *palign
= TYPE_ALIGN (type
);
8840 /* The only cases in which we do anything special is if the resulting mode
8842 if (mode
!= BLKmode
)
8843 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8845 switch (TREE_CODE (exp
))
8849 case NON_LVALUE_EXPR
:
8850 /* Conversions between BLKmode values don't change the underlying
8851 alignment or value. */
8852 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == BLKmode
)
8853 return expand_expr_unaligned (TREE_OPERAND (exp
, 0), palign
);
8857 /* Much of the code for this case is copied directly from expand_expr.
8858 We need to duplicate it here because we will do something different
8859 in the fall-through case, so we need to handle the same exceptions
8862 tree array
= TREE_OPERAND (exp
, 0);
8863 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
8864 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
8865 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
8868 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
8871 /* Optimize the special-case of a zero lower bound.
8873 We convert the low_bound to sizetype to avoid some problems
8874 with constant folding. (E.g. suppose the lower bound is 1,
8875 and its mode is QI. Without the conversion, (ARRAY
8876 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8877 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8879 if (! integer_zerop (low_bound
))
8880 index
= size_diffop (index
, convert (sizetype
, low_bound
));
8882 /* If this is a constant index into a constant array,
8883 just get the value from the array. Handle both the cases when
8884 we have an explicit constructor and when our operand is a variable
8885 that was declared const. */
8887 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
8888 && host_integerp (index
, 0)
8889 && 0 > compare_tree_int (index
,
8890 list_length (CONSTRUCTOR_ELTS
8891 (TREE_OPERAND (exp
, 0)))))
8895 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
8896 i
= tree_low_cst (index
, 0);
8897 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
8901 return expand_expr_unaligned (fold (TREE_VALUE (elem
)), palign
);
8904 else if (optimize
>= 1
8905 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
8906 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
8907 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
8909 if (TREE_CODE (index
) == INTEGER_CST
)
8911 tree init
= DECL_INITIAL (array
);
8913 if (TREE_CODE (init
) == CONSTRUCTOR
)
8917 for (elem
= CONSTRUCTOR_ELTS (init
);
8918 ! tree_int_cst_equal (TREE_PURPOSE (elem
), index
);
8919 elem
= TREE_CHAIN (elem
))
8923 return expand_expr_unaligned (fold (TREE_VALUE (elem
)),
8933 /* If the operand is a CONSTRUCTOR, we can just extract the
8934 appropriate field if it is present. Don't do this if we have
8935 already written the data since we want to refer to that copy
8936 and varasm.c assumes that's what we'll do. */
8937 if (TREE_CODE (exp
) != ARRAY_REF
8938 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8939 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
8943 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
8944 elt
= TREE_CHAIN (elt
))
8945 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
8946 /* Note that unlike the case in expand_expr, we know this is
8947 BLKmode and hence not an integer. */
8948 return expand_expr_unaligned (TREE_VALUE (elt
), palign
);
8952 enum machine_mode mode1
;
8953 HOST_WIDE_INT bitsize
, bitpos
;
8956 unsigned int alignment
;
8958 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
8959 &mode1
, &unsignedp
, &volatilep
,
8962 /* If we got back the original object, something is wrong. Perhaps
8963 we are evaluating an expression too early. In any event, don't
8964 infinitely recurse. */
8968 op0
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8970 /* If this is a constant, put it into a register if it is a
8971 legitimate constant and OFFSET is 0 and memory if it isn't. */
8972 if (CONSTANT_P (op0
))
8974 enum machine_mode inner_mode
= TYPE_MODE (TREE_TYPE (tem
));
8976 if (inner_mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
8978 op0
= force_reg (inner_mode
, op0
);
8980 op0
= validize_mem (force_const_mem (inner_mode
, op0
));
8985 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
8987 /* If this object is in a register, put it into memory.
8988 This case can't occur in C, but can in Ada if we have
8989 unchecked conversion of an expression from a scalar type to
8990 an array or record type. */
8991 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8992 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
8994 tree nt
= build_qualified_type (TREE_TYPE (tem
),
8995 (TYPE_QUALS (TREE_TYPE (tem
))
8996 | TYPE_QUAL_CONST
));
8997 rtx memloc
= assign_temp (nt
, 1, 1, 1);
8999 mark_temp_addr_taken (memloc
);
9000 emit_move_insn (memloc
, op0
);
9004 if (GET_CODE (op0
) != MEM
)
9007 if (GET_MODE (offset_rtx
) != ptr_mode
)
9009 #ifdef POINTERS_EXTEND_UNSIGNED
9010 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
9012 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
9016 op0
= change_address (op0
, VOIDmode
,
9017 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
9018 force_reg (ptr_mode
,
9022 /* Don't forget about volatility even if this is a bitfield. */
9023 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
9025 op0
= copy_rtx (op0
);
9026 MEM_VOLATILE_P (op0
) = 1;
9029 /* Check the access. */
9030 if (current_function_check_memory_usage
&& GET_CODE (op0
) == MEM
)
9035 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
9036 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
9038 /* Check the access right of the pointer. */
9039 in_check_memory_usage
= 1;
9040 if (size
> BITS_PER_UNIT
)
9041 emit_library_call (chkr_check_addr_libfunc
,
9042 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
9043 to
, ptr_mode
, GEN_INT (size
/ BITS_PER_UNIT
),
9044 TYPE_MODE (sizetype
),
9045 GEN_INT (MEMORY_USE_RO
),
9046 TYPE_MODE (integer_type_node
));
9047 in_check_memory_usage
= 0;
9050 /* In cases where an aligned union has an unaligned object
9051 as a field, we might be extracting a BLKmode value from
9052 an integer-mode (e.g., SImode) object. Handle this case
9053 by doing the extract into an object as wide as the field
9054 (which we know to be the width of a basic mode), then
9055 storing into memory, and changing the mode to BLKmode.
9056 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9057 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9058 if (mode1
== VOIDmode
9059 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
9060 || (SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
9061 && (TYPE_ALIGN (type
) > alignment
9062 || bitpos
% TYPE_ALIGN (type
) != 0)))
9064 enum machine_mode ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
9066 if (ext_mode
== BLKmode
)
9068 /* In this case, BITPOS must start at a byte boundary. */
9069 if (GET_CODE (op0
) != MEM
9070 || bitpos
% BITS_PER_UNIT
!= 0)
9073 op0
= change_address (op0
, VOIDmode
,
9074 plus_constant (XEXP (op0
, 0),
9075 bitpos
/ BITS_PER_UNIT
));
9079 tree nt
= build_qualified_type (type_for_mode (ext_mode
, 0),
9081 rtx
new = assign_temp (nt
, 0, 1, 1);
9083 op0
= extract_bit_field (validize_mem (op0
), bitsize
, bitpos
,
9084 unsignedp
, NULL_RTX
, ext_mode
,
9085 ext_mode
, alignment
,
9086 int_size_in_bytes (TREE_TYPE (tem
)));
9088 /* If the result is a record type and BITSIZE is narrower than
9089 the mode of OP0, an integral mode, and this is a big endian
9090 machine, we must put the field into the high-order bits. */
9091 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
9092 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
9093 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
9094 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
9095 size_int (GET_MODE_BITSIZE
9100 emit_move_insn (new, op0
);
9101 op0
= copy_rtx (new);
9102 PUT_MODE (op0
, BLKmode
);
9106 /* Get a reference to just this component. */
9107 op0
= change_address (op0
, mode1
,
9108 plus_constant (XEXP (op0
, 0),
9109 (bitpos
/ BITS_PER_UNIT
)));
9111 MEM_ALIAS_SET (op0
) = get_alias_set (exp
);
9113 /* Adjust the alignment in case the bit position is not
9114 a multiple of the alignment of the inner object. */
9115 while (bitpos
% alignment
!= 0)
9118 if (GET_CODE (XEXP (op0
, 0)) == REG
)
9119 mark_reg_pointer (XEXP (op0
, 0), alignment
);
9121 MEM_IN_STRUCT_P (op0
) = 1;
9122 MEM_VOLATILE_P (op0
) |= volatilep
;
9124 *palign
= alignment
;
9133 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
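/* Usage illustration (added): for a tree of the form
   (PLUS_EXPR (ADDR_EXPR (STRING_CST "hello")) (INTEGER_CST 2)), as built for
   `"hello" + 2', string_constant returns the STRING_CST and sets *PTR_OFFSET
   to a sizetype constant 2; anything that is not a string address plus an
   offset yields 0.  */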
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
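/* Note (added): the POSTINCREMENT_EXPR/POSTDECREMENT_EXPR cases of
   expand_expr call this with POST = !ignore, so a statement such as `p++;'
   whose value is unused is expanded as the cheaper pre-increment.  */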
9184 expand_increment (exp
, post
, ignore
)
9188 register rtx op0
, op1
;
9189 register rtx temp
, value
;
9190 register tree incremented
= TREE_OPERAND (exp
, 0);
9191 optab this_optab
= add_optab
;
9193 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9194 int op0_is_copy
= 0;
9195 int single_insn
= 0;
9196 /* 1 means we can't store into OP0 directly,
9197 because it is a subreg narrower than a word,
9198 and we don't dare clobber the rest of the word. */
9201 /* Stabilize any component ref that might need to be
9202 evaluated more than once below. */
9204 || TREE_CODE (incremented
) == BIT_FIELD_REF
9205 || (TREE_CODE (incremented
) == COMPONENT_REF
9206 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9207 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9208 incremented
= stabilize_reference (incremented
);
9209 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9210 ones into save exprs so that they don't accidentally get evaluated
9211 more than once by the code below. */
9212 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9213 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9214 incremented
= save_expr (incremented
);
9216 /* Compute the operands as RTX.
9217 Note whether OP0 is the actual lvalue or a copy of it:
9218 I believe it is a copy iff it is a register or subreg
9219 and insns were generated in computing it. */
9221 temp
= get_last_insn ();
9222 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_RW
);
  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
9232 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9235 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9239 else if (GET_CODE (op0
) == SUBREG
9240 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9242 /* We cannot increment this SUBREG in place. If we are
9243 post-incrementing, get a copy of the old value. Otherwise,
9244 just mark that we cannot increment in place. */
9246 op0
= copy_to_reg (op0
);
9251 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9252 && temp
!= get_last_insn ());
9253 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
9254 EXPAND_MEMORY_USE_BAD
);
9256 /* Decide whether incrementing or decrementing. */
9257 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9258 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9259 this_optab
= sub_optab
;
9261 /* Convert decrement by a constant into a negative increment. */
9262 if (this_optab
== sub_optab
9263 && GET_CODE (op1
) == CONST_INT
)
9265 op1
= GEN_INT (-INTVAL (op1
));
9266 this_optab
= add_optab
;
9269 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9270 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9272 /* For a preincrement, see if we can do this with a single instruction. */
9275 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9276 if (icode
!= (int) CODE_FOR_nothing
9277 /* Make sure that OP0 is valid for operands 0 and 1
9278 of the insn we want to queue. */
9279 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9280 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9281 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9285 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9286 then we cannot just increment OP0. We must therefore contrive to
9287 increment the original value. Then, for postincrement, we can return
9288 OP0 since it is a copy of the old value. For preincrement, expand here
9289 unless we can do it with a single insn.
9291 Likewise if storing directly into OP0 would clobber high bits
9292 we need to preserve (bad_subreg). */
9293 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9295 /* This is the easiest way to increment the value wherever it is.
9296 Problems with multiple evaluation of INCREMENTED are prevented
9297 because either (1) it is a component_ref or preincrement,
9298 in which case it was stabilized above, or (2) it is an array_ref
9299 with constant index in an array in a register, which is
9300 safe to reevaluate. */
9301 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9302 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9303 ? MINUS_EXPR
: PLUS_EXPR
),
9306 TREE_OPERAND (exp
, 1));
9308 while (TREE_CODE (incremented
) == NOP_EXPR
9309 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9311 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9312 incremented
= TREE_OPERAND (incremented
, 0);
9315 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9316 return post
? op0
: temp
;
9321 /* We have a true reference to the value in OP0.
9322 If there is an insn to add or subtract in this mode, queue it.
9323 Queueing the increment insn avoids the register shuffling
9324 that often results if we must increment now and first save
9325 the old value for subsequent use. */
9327 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9328 op0
= stabilize (op0
);
9331 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9332 if (icode
!= (int) CODE_FOR_nothing
9333 /* Make sure that OP0 is valid for operands 0 and 1
9334 of the insn we want to queue. */
9335 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9336 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9338 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9339 op1
= force_reg (mode
, op1
);
9341 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9343 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9345 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9346 ? force_reg (Pmode
, XEXP (op0
, 0))
9347 : copy_to_reg (XEXP (op0
, 0)));
9350 op0
= change_address (op0
, VOIDmode
, addr
);
9351 temp
= force_reg (GET_MODE (op0
), op0
);
9352 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9353 op1
= force_reg (mode
, op1
);
9355 /* The increment queue is LIFO, thus we have to `queue'
9356 the instructions in reverse order. */
9357 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9358 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9363 /* Preincrement, or we can't increment with one simple insn. */
9365 /* Save a copy of the value before inc or dec, to return it later. */
9366 temp
= value
= copy_to_reg (op0
);
9368 /* Arrange to return the incremented value. */
9369 /* Copy the rtx because expand_binop will protect from the queue,
9370 and the results of that would be invalid for us to return
9371 if our caller does emit_queue before using our result. */
9372 temp
= copy_rtx (value
= op0
);
9374 /* Increment however we can. */
9375 op1
= expand_binop (mode
, this_optab
, value
, op1
,
9376 current_function_check_memory_usage
? NULL_RTX
: op0
,
9377 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9378 /* Make sure the value is stored into OP0. */
9380 emit_move_insn (op0
, op1
);
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
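/* Illustration (added): if a call pushes 16 bytes of arguments and no pop is
   required immediately, the 16 is accumulated in pending_stack_adjust rather
   than popped right away; a later call can reuse that space, and
   do_pending_stack_adjust emits a single adjust_stack for the combined
   amount at a point where the stack must be exact, e.g. before a label that
   can be reached from elsewhere.  */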
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
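/* Illustration (added): for `if (a && b) stmt;' the caller passes
   IF_FALSE_LABEL = the else/end label and IF_TRUE_LABEL = 0; the
   TRUTH_ANDIF_EXPR case below emits "test a; branch to the false label if
   zero; test b; branch to the false label if zero" and falls through into
   STMT, so no 0/1 value for the condition is ever materialized.  */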
9466 do_jump (exp
, if_false_label
, if_true_label
)
9468 rtx if_false_label
, if_true_label
;
9470 register enum tree_code code
= TREE_CODE (exp
);
9471 /* Some cases need to create a label to jump to
9472 in order to properly fall through.
9473 These cases set DROP_THROUGH_LABEL nonzero. */
9474 rtx drop_through_label
= 0;
9478 enum machine_mode mode
;
9480 #ifdef MAX_INTEGER_COMPUTATION_MODE
9481 check_max_integer_computation_mode (exp
);
9492 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9498 /* This is not true with #pragma weak */
9500 /* The address of something can never be zero. */
9502 emit_jump (if_true_label
);
9507 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9508 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9509 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
9512 /* If we are narrowing the operand, we have to do the compare in the
9514 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9515 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9517 case NON_LVALUE_EXPR
:
9518 case REFERENCE_EXPR
:
9523 /* These cannot change zero->non-zero or vice versa. */
9524 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9527 case WITH_RECORD_EXPR
:
9528 /* Put the object on the placeholder list, recurse through our first
9529 operand, and pop the list. */
9530 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9532 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9533 placeholder_list
= TREE_CHAIN (placeholder_list
);
9537 /* This is never less insns than evaluating the PLUS_EXPR followed by
9538 a test and can be longer if the test is eliminated. */
9540 /* Reduce to minus. */
9541 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9542 TREE_OPERAND (exp
, 0),
9543 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9544 TREE_OPERAND (exp
, 1))));
9545 /* Process as MINUS. */
9549 /* Non-zero iff operands of minus differ. */
9550 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9551 TREE_OPERAND (exp
, 0),
9552 TREE_OPERAND (exp
, 1)),
9553 NE
, NE
, if_false_label
, if_true_label
);
9557 /* If we are AND'ing with a small constant, do this comparison in the
9558 smallest type that fits. If the machine doesn't have comparisons
9559 that small, it will be converted back to the wider comparison.
9560 This helps if we are testing the sign bit of a narrower object.
9561 combine can't do this for us because it can't know whether a
9562 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9564 if (! SLOW_BYTE_ACCESS
9565 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9566 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9567 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9568 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9569 && (type
= type_for_mode (mode
, 1)) != 0
9570 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9571 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9572 != CODE_FOR_nothing
))
9574 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9579 case TRUTH_NOT_EXPR
:
9580 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9583 case TRUTH_ANDIF_EXPR
:
9584 if (if_false_label
== 0)
9585 if_false_label
= drop_through_label
= gen_label_rtx ();
9586 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9587 start_cleanup_deferral ();
9588 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9589 end_cleanup_deferral ();
9592 case TRUTH_ORIF_EXPR
:
9593 if (if_true_label
== 0)
9594 if_true_label
= drop_through_label
= gen_label_rtx ();
9595 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9596 start_cleanup_deferral ();
9597 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9598 end_cleanup_deferral ();
9603 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9604 preserve_temp_slots (NULL_RTX
);
9608 do_pending_stack_adjust ();
9609 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9616 HOST_WIDE_INT bitsize
, bitpos
;
9618 enum machine_mode mode
;
9622 unsigned int alignment
;
9624 /* Get description of this reference. We don't actually care
9625 about the underlying object here. */
9626 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9627 &unsignedp
, &volatilep
, &alignment
);
9629 type
= type_for_size (bitsize
, unsignedp
);
9630 if (! SLOW_BYTE_ACCESS
9631 && type
!= 0 && bitsize
>= 0
9632 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9633 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9634 != CODE_FOR_nothing
))
9636 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9643 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9644 if (integer_onep (TREE_OPERAND (exp
, 1))
9645 && integer_zerop (TREE_OPERAND (exp
, 2)))
9646 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9648 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9649 && integer_onep (TREE_OPERAND (exp
, 2)))
9650 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9654 register rtx label1
= gen_label_rtx ();
9655 drop_through_label
= gen_label_rtx ();
9657 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9659 start_cleanup_deferral ();
9660 /* Now the THEN-expression. */
9661 do_jump (TREE_OPERAND (exp
, 1),
9662 if_false_label
? if_false_label
: drop_through_label
,
9663 if_true_label
? if_true_label
: drop_through_label
);
9664 /* In case the do_jump just above never jumps. */
9665 do_pending_stack_adjust ();
9666 emit_label (label1
);
9668 /* Now the ELSE-expression. */
9669 do_jump (TREE_OPERAND (exp
, 2),
9670 if_false_label
? if_false_label
: drop_through_label
,
9671 if_true_label
? if_true_label
: drop_through_label
);
9672 end_cleanup_deferral ();
9678 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9680 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9681 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9683 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9684 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9687 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9688 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9689 fold (build1 (REALPART_EXPR
,
9690 TREE_TYPE (inner_type
),
9692 fold (build1 (REALPART_EXPR
,
9693 TREE_TYPE (inner_type
),
9695 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9696 fold (build1 (IMAGPART_EXPR
,
9697 TREE_TYPE (inner_type
),
9699 fold (build1 (IMAGPART_EXPR
,
9700 TREE_TYPE (inner_type
),
9702 if_false_label
, if_true_label
);
9705 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9706 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9708 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9709 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9710 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9712 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9718 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9720 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9721 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9723 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9724 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9727 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9728 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9729 fold (build1 (REALPART_EXPR
,
9730 TREE_TYPE (inner_type
),
9732 fold (build1 (REALPART_EXPR
,
9733 TREE_TYPE (inner_type
),
9735 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9736 fold (build1 (IMAGPART_EXPR
,
9737 TREE_TYPE (inner_type
),
9739 fold (build1 (IMAGPART_EXPR
,
9740 TREE_TYPE (inner_type
),
9742 if_false_label
, if_true_label
);
9745 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9746 do_jump (TREE_OPERAND (exp
             , 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode2 = LT_EXPR;
        goto unordered_bcc;

      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode2 = LE_EXPR;
        goto unordered_bcc;

      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode2 = GE_EXPR;
        goto unordered_bcc;

      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into UNORDERED + comparison.  */
            cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
            exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
            do_jump (exp, if_false_label, if_true_label);
          }
    }
    break;

    default:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                 GET_MODE (temp), NULL_RTX, 0,
                                 if_false_label, if_true_label);
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
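
/* Illustrative sketch only (not part of the original code): for a plain
   truth-value test such as `if (x) goto yes;', where X has a mode the
   target can compare directly, the default case above boils down to

       do_compare_rtx_and_jump (temp, const0_rtx, NE, unsignedp,
                                GET_MODE (temp), NULL_RTX, 0,
                                NULL_RTX, yes_label);

   while a multiword X with no usable NE pattern is handled one word at a
   time by do_jump_by_parts_equality_rtx, with the labels swapped, since
   that routine jumps on equality with zero and we want the not-equal
   sense.  */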
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                              if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX, 0,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0, NULL_RTX, if_false_label);
    }

  emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
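
/* Illustrative sketch only (not part of the original code): for a
   two-word comparison OP0 > OP1 the loop above behaves roughly like

       compare high words:
         if greater (caller's signedness)  goto if_true_label;
         if not equal                      goto if_false_label;
       compare low words:
         if greater (always unsigned)      goto if_true_label;
       goto if_false_label;

   Only the most significant word is compared with the caller's
   signedness; every lower word is compared unsigned, and lower words
   are reached only when all higher words compare equal.  */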
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, 0, if_false_label,
                             NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, 0, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
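
/* Illustrative sketch only (not part of the original code): for a
   two-word OP0 the fast path above computes

       part = op0_word_0 | op0_word_1;
       if (part == 0) goto if_true_label;  else goto if_false_label;

   i.e. a single IOR followed by one compare against zero.  The word-by-word
   loop at the end is used only when expand_binop cannot produce the IOR in
   word_mode.  */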
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  register rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
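
/* Example of the transformation disabled above (illustration only): with
   a signed QImode OP0, the equality test `op0 == -1' would become the
   unsigned test `op0 == 0xff', via

       op1 = GEN_INT (-1 & GET_MODE_MASK (QImode));   (i.e. 0xff)
       unsignedp = 1;

   Equality is insensitive to signedness, and the masked constant is what
   an unsigned (zero-extended) view of OP0 would be compared against.  It
   is kept under #if 0 because combine.c now removes the redundant sign
   extensions on its own.  */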
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  register rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
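
/* Minimal usage sketch (illustration only): a caller that only needs a
   branch when the condition holds, e.g.

       do_compare_rtx_and_jump (x, y, LT, 0, SImode, NULL_RTX, 0,
                                NULL_RTX, loop_top_label);

   gets a single conditional branch to LOOP_TOP_LABEL and falls through
   otherwise.  If instead only IF_FALSE_LABEL is supplied and the mode is
   not floating point, the code above reverses the condition (LT becomes
   GE) so that the false case is the branch actually taken.  */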
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                          size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
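
/* Sketch of the single-bit shortcut above (illustration only): for
   `(x & 8) != 0' the flag value is computed without any scc insn or
   branch, roughly as

       t = (unsigned) x >> 3;    shift the tested bit down to bit 0
       t = t & 1;                mask away the remaining bits

   For the EQ sense (or an inverted NE) an XOR with 1 is emitted before
   the AND, and when the tested bit is the sign bit the final AND is
   omitted because the unsigned shift already leaves only that bit.  */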
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */
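
/* Usage sketch (illustration only): for `switch (i)' with case values
   5 through 12, the switch expander first computes INDEX = i - 5 and
   then calls

       do_tablejump (index, SImode, GEN_INT (7), table_label,
                     default_label);

   The single GTU test catches both i < 5 and i > 12, because values
   below the minimum wrap around to large unsigned numbers after the
   subtraction.  The dispatch address is formed as

       table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   (or its PIC_CASE_VECTOR_ADDRESS variant), and the table entry loaded
   from that address feeds the target's tablejump pattern.  SImode here
   is just an example; the real caller passes the mode of the index
   type.  */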