1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
31 #include "hard-reg-set.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
41 #include "typeclass.h"
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
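/* Worked example (added for illustration, not in the original sources):
   CEIL (10, 4) expands to ((10 + 4 - 1) / 4) == 3, i.e. the number of
   4-byte units needed to cover 10 bytes.  */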
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
66 #define STACK_PUSH_CODE PRE_INC
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
88 /* Don't check memory usage, since code is being emitted to check a memory
89 usage. Used when current_function_check_memory_usage is true, to avoid
90 infinite recursion. */
91 static int in_check_memory_usage;
93 /* This structure is used by move_by_pieces to describe the move to
106 int explicit_inc_from;
114 /* This structure is used by clear_by_pieces to describe the clear to
117 struct clear_by_pieces
129 extern struct obstack permanent_obstack;
131 static rtx get_push_address	PROTO ((int));
133 static rtx enqueue_insn	PROTO((rtx, rtx));
134 static int move_by_pieces_ninsns PROTO((unsigned int, int));
135 static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
136 				       struct move_by_pieces *));
137 static void clear_by_pieces	PROTO((rtx, int, int));
138 static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...),
140 				       struct clear_by_pieces *));
141 static int is_zeros_p		PROTO((tree));
142 static int mostly_zeros_p	PROTO((tree));
143 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
144 					    tree, tree, int, int));
145 static void store_constructor	PROTO((tree, rtx, int, int));
146 static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
147 				       enum machine_mode, int, int,
149 static enum memory_use_mode
150 get_memory_usage_from_modifier PROTO((enum expand_modifier));
151 static tree save_noncopied_parts PROTO((tree, tree));
152 static tree init_noncopied_parts PROTO((tree, tree));
153 static int safe_from_p		PROTO((rtx, tree, int));
154 static int fixed_type_p	PROTO((tree));
155 static rtx var_rtx		PROTO((tree));
156 static rtx expand_increment	PROTO((tree, int, int));
157 static void preexpand_calls	PROTO((tree));
158 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
159 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
160 static void do_compare_and_jump	PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
161 static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* If a memory-to-memory move would take MOVE_RATIO or more simple
171 move-instruction sequences, we will do a movstr or libcall instead. */
174 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
177 /* If we are optimizing for space (-Os), cut down the default move ratio */
178 #define MOVE_RATIO (optimize_size ? 3 : 15)
182 /* This macro is used to determine whether move_by_pieces should be called
183 to perform a structure copy. */
184 #ifndef MOVE_BY_PIECES_P
185 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
186 (SIZE, ALIGN) < MOVE_RATIO)
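/* Illustrative example (added, not in the original sources): on a target
   with 4-byte words, a word-aligned 32-byte copy needs 32 / 4 = 8
   single-word moves.  Since 8 < 15, with the default MOVE_RATIO above the
   copy is expanded inline by move_by_pieces, while under -Os (ratio 3) a
   movstr pattern or a library call is preferred instead.  */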
189 /* This array records the insn_code of insns to perform block moves. */
190 enum insn_code movstr_optab[NUM_MACHINE_MODES];
192 /* This array records the insn_code of insns to perform block clears. */
193 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
195 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
201 /* This is run once per compilation to set up which modes can be used
202 directly in memory and to initialize the block move optab. */
208 enum machine_mode mode;
215 /* Since we are on the permanent obstack, we must be sure we save this
216 spot AFTER we call start_sequence, since it will reuse the rtl it
218 free_point = (char *) oballoc (0);
220 /* Try indexing by frame ptr and try by stack ptr.
221 It is known that on the Convex the stack ptr isn't a valid index.
222 With luck, one or the other is valid on any machine. */
223 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
224 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
226 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
227 pat = PATTERN (insn);
229 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
230 mode
= (enum machine_mode
) ((int) mode
+ 1))
235 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
236 PUT_MODE (mem
, mode
);
237 PUT_MODE (mem1
, mode
);
239 /* See if there is some register that can be used in this mode and
240 directly loaded or stored from memory. */
242 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
243 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
244 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
247 if (! HARD_REGNO_MODE_OK (regno
, mode
))
250 reg
= gen_rtx_REG (mode
, regno
);
253 SET_DEST (pat
) = reg
;
254 if (recog (pat
, insn
, &num_clobbers
) >= 0)
255 direct_load
[(int) mode
] = 1;
257 SET_SRC (pat
) = mem1
;
258 SET_DEST (pat
) = reg
;
259 if (recog (pat
, insn
, &num_clobbers
) >= 0)
260 direct_load
[(int) mode
] = 1;
263 SET_DEST (pat
) = mem
;
264 if (recog (pat
, insn
, &num_clobbers
) >= 0)
265 direct_store
[(int) mode
] = 1;
268 SET_DEST (pat
) = mem1
;
269 if (recog (pat
, insn
, &num_clobbers
) >= 0)
270 direct_store
[(int) mode
] = 1;
278 /* This is run at the start of compiling a function. */
283 current_function->expr
284 = (struct expr_status *) xmalloc (sizeof (struct expr_status));
287 pending_stack_adjust = 0;
288 inhibit_defer_pop = 0;
290 apply_args_value = 0;
296 struct expr_status *p;
301 ggc_mark_rtx (p->x_saveregs_value);
302 ggc_mark_rtx (p->x_apply_args_value);
303 ggc_mark_rtx (p->x_forced_labels);
314 /* Small sanity check that the queue is empty at the end of a function. */
316 finish_expr_for_function ()
322 /* Manage the queue of increment instructions to be output
323 for POSTINCREMENT_EXPR expressions, etc. */
325 /* Queue up to increment (or change) VAR later. BODY says how:
326 BODY should be the same thing you would pass to emit_insn
327 to increment right away. It will go to emit_insn later on.
329 The value is a QUEUED expression to be used in place of VAR
330 where you want to guarantee the pre-incrementation value of VAR. */
333 enqueue_insn (var
, body
)
336 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
337 body
, pending_chain
);
338 return pending_chain
;
341 /* Use protect_from_queue to convert a QUEUED expression
342 into something that you can put immediately into an instruction.
343 If the queued incrementation has not happened yet,
344 protect_from_queue returns the variable itself.
345 If the incrementation has happened, protect_from_queue returns a temp
346 that contains a copy of the old value of the variable.
348 Any time an rtx which might possibly be a QUEUED is to be put
349 into an instruction, it must be passed through protect_from_queue first.
350 QUEUED expressions are not meaningful in instructions.
352 Do not pass a value through protect_from_queue and then hold
353 on to it for a while before putting it in an instruction!
354 If the queue is flushed in between, incorrect code will result. */
357 protect_from_queue (x
, modify
)
361 register RTX_CODE code
= GET_CODE (x
);
363 #if 0 /* A QUEUED can hang around after the queue is forced out. */
364 /* Shortcut for most common case. */
365 if (pending_chain
== 0)
371 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
372 use of autoincrement. Make a copy of the contents of the memory
373 location rather than a copy of the address, but not if the value is
374 of mode BLKmode. Don't modify X in place since it might be
376 if (code
== MEM
&& GET_MODE (x
) != BLKmode
377 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
379 register rtx y
= XEXP (x
, 0);
380 register rtx
new = gen_rtx_MEM (GET_MODE (x
), QUEUED_VAR (y
));
382 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x
);
383 MEM_COPY_ATTRIBUTES (new, x
);
384 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x
);
388 register rtx temp
= gen_reg_rtx (GET_MODE (new));
389 emit_insn_before (gen_move_insn (temp
, new),
395 /* Otherwise, recursively protect the subexpressions of all
396 the kinds of rtx's that can contain a QUEUED. */
399 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
400 if (tem
!= XEXP (x
, 0))
406 else if (code
== PLUS
|| code
== MULT
)
408 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
409 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
410 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
419 /* If the increment has not happened, use the variable itself. */
420 if (QUEUED_INSN (x
) == 0)
421 return QUEUED_VAR (x
);
422 /* If the increment has happened and a pre-increment copy exists,
424 if (QUEUED_COPY (x
) != 0)
425 return QUEUED_COPY (x
);
426 /* The increment has happened but we haven't set up a pre-increment copy.
427 Set one up now, and use it. */
428 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
429 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
431 return QUEUED_COPY (x
);
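#if 0
/* Illustrative sketch only (added for exposition, not compiled): the
   intended protocol around the increment queue.  VAR, AMOUNT and TARGET
   are hypothetical rtx values.  */
{
  /* Delay "VAR += AMOUNT" instead of emitting it immediately.  */
  rtx q = enqueue_insn (var, gen_add2_insn (var, amount));
  /* Before using the QUEUED rtx in an insn, filter it; this yields VAR
     itself (increment not yet emitted) or a copy of its old value.  */
  emit_move_insn (target, protect_from_queue (q, 0));
  /* Flushing the queue emits the delayed increment.  */
  emit_queue ();
}
#endif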
434 /* Return nonzero if X contains a QUEUED expression:
435 if it contains anything that will be altered by a queued increment.
436 We handle only combinations of MEM, PLUS, MINUS and MULT operators
437 since memory addresses generally contain only those. */
443 register enum rtx_code code
= GET_CODE (x
);
449 return queued_subexp_p (XEXP (x
, 0));
453 return (queued_subexp_p (XEXP (x
, 0))
454 || queued_subexp_p (XEXP (x
, 1)));
460 /* Perform all the pending incrementations. */
466 while ((p
= pending_chain
))
468 rtx body
= QUEUED_BODY (p
);
470 if (GET_CODE (body
) == SEQUENCE
)
472 QUEUED_INSN (p
) = XVECEXP (QUEUED_BODY (p
), 0, 0);
473 emit_insn (QUEUED_BODY (p
));
476 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
477 pending_chain
= QUEUED_NEXT (p
);
481 /* Copy data from FROM to TO, where the machine modes are not the same.
482 Both modes may be integer, or both may be floating.
483 UNSIGNEDP should be nonzero if FROM is an unsigned type.
484 This causes zero-extension instead of sign-extension. */
487 convert_move (to, from, unsignedp)
488 register rtx to, from;
491 enum machine_mode to_mode = GET_MODE (to);
492 enum machine_mode from_mode = GET_MODE (from);
493 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
494 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
498 /* rtx code for making an equivalent value. */
499 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
501 to = protect_from_queue (to, 1);
502 from = protect_from_queue (from, 0);
504 if (to_real
!= from_real
)
507 /* If FROM is a SUBREG that indicates that we have already done at least
508 the required extension, strip it. We don't handle such SUBREGs as
511 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
512 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
513 >= GET_MODE_SIZE (to_mode
))
514 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
515 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
517 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
520 if (to_mode
== from_mode
521 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
523 emit_move_insn (to
, from
);
531 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
533 /* Try converting directly if the insn is supported. */
534 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
537 emit_unop_insn (code
, to
, from
, UNKNOWN
);
542 #ifdef HAVE_trunchfqf2
543 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
545 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
549 #ifdef HAVE_trunctqfqf2
550 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
552 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
556 #ifdef HAVE_truncsfqf2
557 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
559 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
563 #ifdef HAVE_truncdfqf2
564 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
566 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
570 #ifdef HAVE_truncxfqf2
571 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
573 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
577 #ifdef HAVE_trunctfqf2
578 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
580 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
585 #ifdef HAVE_trunctqfhf2
586 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
588 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
592 #ifdef HAVE_truncsfhf2
593 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
595 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
599 #ifdef HAVE_truncdfhf2
600 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
602 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
606 #ifdef HAVE_truncxfhf2
607 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
609 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
613 #ifdef HAVE_trunctfhf2
614 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
616 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
621 #ifdef HAVE_truncsftqf2
622 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
624 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
628 #ifdef HAVE_truncdftqf2
629 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
631 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
635 #ifdef HAVE_truncxftqf2
636 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
638 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
642 #ifdef HAVE_trunctftqf2
643 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
645 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
650 #ifdef HAVE_truncdfsf2
651 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
653 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
657 #ifdef HAVE_truncxfsf2
658 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
660 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
664 #ifdef HAVE_trunctfsf2
665 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
667 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
671 #ifdef HAVE_truncxfdf2
672 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
674 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
678 #ifdef HAVE_trunctfdf2
679 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
681 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
693 libcall
= extendsfdf2_libfunc
;
697 libcall
= extendsfxf2_libfunc
;
701 libcall
= extendsftf2_libfunc
;
713 libcall
= truncdfsf2_libfunc
;
717 libcall
= extenddfxf2_libfunc
;
721 libcall
= extenddftf2_libfunc
;
733 libcall
= truncxfsf2_libfunc
;
737 libcall
= truncxfdf2_libfunc
;
749 libcall
= trunctfsf2_libfunc
;
753 libcall
= trunctfdf2_libfunc
;
765 if (libcall
== (rtx
) 0)
766 /* This conversion is not implemented yet. */
769 value
= emit_library_call_value (libcall
, NULL_RTX
, 1, to_mode
,
771 emit_move_insn (to
, value
);
775 /* Now both modes are integers. */
777 /* Handle expanding beyond a word. */
778 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
779 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
786 enum machine_mode lowpart_mode
;
787 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
789 /* Try converting directly if the insn is supported. */
790 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
793 /* If FROM is a SUBREG, put it into a register. Do this
794 so that we always generate the same set of insns for
795 better cse'ing; if an intermediate assignment occurred,
796 we won't be doing the operation directly on the SUBREG. */
797 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
798 from
= force_reg (from_mode
, from
);
799 emit_unop_insn (code
, to
, from
, equiv_code
);
802 /* Next, try converting via full word. */
803 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
804 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
805 != CODE_FOR_nothing
))
807 if (GET_CODE (to
) == REG
)
808 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
809 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
810 emit_unop_insn (code
, to
,
811 gen_lowpart (word_mode
, to
), equiv_code
);
815 /* No special multiword conversion insn; do it by hand. */
818 /* Since we will turn this into a no conflict block, we must ensure
819 that the source does not overlap the target. */
821 if (reg_overlap_mentioned_p (to
, from
))
822 from
= force_reg (from_mode
, from
);
824 /* Get a copy of FROM widened to a word, if necessary. */
825 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
826 lowpart_mode
= word_mode
;
828 lowpart_mode
= from_mode
;
830 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
832 lowpart
= gen_lowpart (lowpart_mode
, to
);
833 emit_move_insn (lowpart
, lowfrom
);
835 /* Compute the value to put in each remaining word. */
837 fill_value
= const0_rtx
;
842 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
843 && STORE_FLAG_VALUE
== -1)
845 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
847 fill_value
= gen_reg_rtx (word_mode
);
848 emit_insn (gen_slt (fill_value
));
854 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
855 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
857 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
861 /* Fill the remaining words. */
862 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
864 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
865 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
870 if (fill_value
!= subword
)
871 emit_move_insn (subword
, fill_value
);
874 insns
= get_insns ();
877 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
878 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
882 /* Truncating multi-word to a word or less. */
883 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
884 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
886 if (!((GET_CODE (from
) == MEM
887 && ! MEM_VOLATILE_P (from
)
888 && direct_load
[(int) to_mode
]
889 && ! mode_dependent_address_p (XEXP (from
, 0)))
890 || GET_CODE (from
) == REG
891 || GET_CODE (from
) == SUBREG
))
892 from
= force_reg (from_mode
, from
);
893 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
897 /* Handle pointer conversion */ /* SPEE 900220 */
898 if (to_mode
== PQImode
)
900 if (from_mode
!= QImode
)
901 from
= convert_to_mode (QImode
, from
, unsignedp
);
903 #ifdef HAVE_truncqipqi2
904 if (HAVE_truncqipqi2
)
906 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
909 #endif /* HAVE_truncqipqi2 */
913 if (from_mode
== PQImode
)
915 if (to_mode
!= QImode
)
917 from
= convert_to_mode (QImode
, from
, unsignedp
);
922 #ifdef HAVE_extendpqiqi2
923 if (HAVE_extendpqiqi2
)
925 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
928 #endif /* HAVE_extendpqiqi2 */
933 if (to_mode
== PSImode
)
935 if (from_mode
!= SImode
)
936 from
= convert_to_mode (SImode
, from
, unsignedp
);
938 #ifdef HAVE_truncsipsi2
939 if (HAVE_truncsipsi2
)
941 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
944 #endif /* HAVE_truncsipsi2 */
948 if (from_mode
== PSImode
)
950 if (to_mode
!= SImode
)
952 from
= convert_to_mode (SImode
, from
, unsignedp
);
957 #ifdef HAVE_extendpsisi2
958 if (HAVE_extendpsisi2
)
960 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
963 #endif /* HAVE_extendpsisi2 */
968 if (to_mode
== PDImode
)
970 if (from_mode
!= DImode
)
971 from
= convert_to_mode (DImode
, from
, unsignedp
);
973 #ifdef HAVE_truncdipdi2
974 if (HAVE_truncdipdi2
)
976 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
979 #endif /* HAVE_truncdipdi2 */
983 if (from_mode
== PDImode
)
985 if (to_mode
!= DImode
)
987 from
= convert_to_mode (DImode
, from
, unsignedp
);
992 #ifdef HAVE_extendpdidi2
993 if (HAVE_extendpdidi2
)
995 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
998 #endif /* HAVE_extendpdidi2 */
1003 /* Now follow all the conversions between integers
1004 no more than a word long. */
1006 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1007 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1008 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1009 GET_MODE_BITSIZE (from_mode
)))
1011 if (!((GET_CODE (from
) == MEM
1012 && ! MEM_VOLATILE_P (from
)
1013 && direct_load
[(int) to_mode
]
1014 && ! mode_dependent_address_p (XEXP (from
, 0)))
1015 || GET_CODE (from
) == REG
1016 || GET_CODE (from
) == SUBREG
))
1017 from
= force_reg (from_mode
, from
);
1018 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1019 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1020 from
= copy_to_reg (from
);
1021 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1025 /* Handle extension. */
1026 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1028 /* Convert directly if that works. */
1029 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1030 != CODE_FOR_nothing
)
1032 emit_unop_insn (code
, to
, from
, equiv_code
);
1037 enum machine_mode intermediate
;
1041 /* Search for a mode to convert via. */
1042 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1043 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1044 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1045 != CODE_FOR_nothing
)
1046 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1048 GET_MODE_BITSIZE (intermediate
))))
1049 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1050 != CODE_FOR_nothing
))
1052 convert_move (to
, convert_to_mode (intermediate
, from
,
1053 unsignedp
), unsignedp
);
1057 /* No suitable intermediate mode.
1058 Generate what we need with shifts. */
1059 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1060 - GET_MODE_BITSIZE (from_mode
), 0);
1061 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1062 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1064 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1067 emit_move_insn (to
, tmp
);
1072 /* Support special truncate insns for certain modes. */
1074 if (from_mode
== DImode
&& to_mode
== SImode
)
1076 #ifdef HAVE_truncdisi2
1077 if (HAVE_truncdisi2
)
1079 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1083 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1087 if (from_mode
== DImode
&& to_mode
== HImode
)
1089 #ifdef HAVE_truncdihi2
1090 if (HAVE_truncdihi2
)
1092 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1096 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1100 if (from_mode
== DImode
&& to_mode
== QImode
)
1102 #ifdef HAVE_truncdiqi2
1103 if (HAVE_truncdiqi2
)
1105 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1109 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1113 if (from_mode
== SImode
&& to_mode
== HImode
)
1115 #ifdef HAVE_truncsihi2
1116 if (HAVE_truncsihi2
)
1118 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1122 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1126 if (from_mode
== SImode
&& to_mode
== QImode
)
1128 #ifdef HAVE_truncsiqi2
1129 if (HAVE_truncsiqi2
)
1131 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1135 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1139 if (from_mode
== HImode
&& to_mode
== QImode
)
1141 #ifdef HAVE_trunchiqi2
1142 if (HAVE_trunchiqi2
)
1144 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1148 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1152 if (from_mode
== TImode
&& to_mode
== DImode
)
1154 #ifdef HAVE_trunctidi2
1155 if (HAVE_trunctidi2
)
1157 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1161 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1165 if (from_mode
== TImode
&& to_mode
== SImode
)
1167 #ifdef HAVE_trunctisi2
1168 if (HAVE_trunctisi2
)
1170 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1174 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1178 if (from_mode
== TImode
&& to_mode
== HImode
)
1180 #ifdef HAVE_trunctihi2
1181 if (HAVE_trunctihi2
)
1183 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1187 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1191 if (from_mode
== TImode
&& to_mode
== QImode
)
1193 #ifdef HAVE_trunctiqi2
1194 if (HAVE_trunctiqi2
)
1196 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1200 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1204 /* Handle truncation of volatile memrefs, and so on;
1205 the things that couldn't be truncated directly,
1206 and for which there was no special instruction. */
1207 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1209 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1210 emit_move_insn (to
, temp
);
1214 /* Mode combination is not recognized. */
1218 /* Return an rtx for a value that would result
1219 from converting X to mode MODE.
1220 Both X and MODE may be floating, or both integer.
1221 UNSIGNEDP is nonzero if X is an unsigned value.
1222 This can be done by referring to a part of X in place
1223 or by copying to a new temporary with conversion.
1225 This function *must not* call protect_from_queue
1226 except when putting X into an insn (in which case convert_move does it). */
1229 convert_to_mode (mode
, x
, unsignedp
)
1230 enum machine_mode mode
;
1234 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
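#if 0
/* Illustrative sketch only (added for exposition, not compiled): widening
   a QImode value to SImode.  REG and DEST are hypothetical rtx values of
   the appropriate modes.  */
{
  /* Get a new SImode rtx, zero extending because UNSIGNEDP is 1.  */
  rtx wide = convert_to_mode (SImode, reg, 1);
  /* Or convert into an existing SImode destination directly.  */
  convert_move (dest, reg, 1);
}
#endif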
1237 /* Return an rtx for a value that would result
1238 from converting X from mode OLDMODE to mode MODE.
1239 Both modes may be floating, or both integer.
1240 UNSIGNEDP is nonzero if X is an unsigned value.
1242 This can be done by referring to a part of X in place
1243 or by copying to a new temporary with conversion.
1245 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1247 This function *must not* call protect_from_queue
1248 except when putting X into an insn (in which case convert_move does it). */
1251 convert_modes (mode
, oldmode
, x
, unsignedp
)
1252 enum machine_mode mode
, oldmode
;
1258 /* If FROM is a SUBREG that indicates that we have already done at least
1259 the required extension, strip it. */
1261 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1262 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1263 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1264 x
= gen_lowpart (mode
, x
);
1266 if (GET_MODE (x
) != VOIDmode
)
1267 oldmode
= GET_MODE (x
);
1269 if (mode
== oldmode
)
1272 /* There is one case that we must handle specially: If we are converting
1273 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1274 we are to interpret the constant as unsigned, gen_lowpart will do
1275 the wrong thing if the constant appears negative. What we want to do is
1276 make the high-order word of the constant zero, not all ones. */
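/* Worked example (added for illustration): with a 32-bit HOST_WIDE_INT,
   converting a (const_int -1) that came from a QImode value to an unsigned
   DImode constant should yield 0xff, not 0xffffffffffffffff: VAL is masked
   down to the low 8 bits and the high word handed to immed_double_const
   below is zero.  */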
1278 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1279 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1280 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1282 HOST_WIDE_INT val
= INTVAL (x
);
1284 if (oldmode
!= VOIDmode
1285 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1287 int width
= GET_MODE_BITSIZE (oldmode
);
1289 /* We need to zero extend VAL. */
1290 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1293 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1296 /* We can do this with a gen_lowpart if both desired and current modes
1297 are integer, and this is either a constant integer, a register, or a
1298 non-volatile MEM. Except for the constant case where MODE is no
1299 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1301 if ((GET_CODE (x
) == CONST_INT
1302 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1303 || (GET_MODE_CLASS (mode
) == MODE_INT
1304 && GET_MODE_CLASS (oldmode
) == MODE_INT
1305 && (GET_CODE (x
) == CONST_DOUBLE
1306 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1307 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1308 && direct_load
[(int) mode
])
1309 || (GET_CODE (x
) == REG
1310 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1311 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1313 /* ?? If we don't know OLDMODE, we have to assume here that
1314 X does not need sign- or zero-extension. This may not be
1315 the case, but it's the best we can do. */
1316 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1317 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1319 HOST_WIDE_INT val
= INTVAL (x
);
1320 int width
= GET_MODE_BITSIZE (oldmode
);
1322 /* We must sign or zero-extend in this case. Start by
1323 zero-extending, then sign extend if we need to. */
1324 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1326 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1327 val
|= (HOST_WIDE_INT
) (-1) << width
;
1329 return GEN_INT (val
);
1332 return gen_lowpart (mode
, x
);
1335 temp
= gen_reg_rtx (mode
);
1336 convert_move (temp
, x
, unsignedp
);
1341 /* This macro is used to determine what the largest unit size that
1342 move_by_pieces can use is. */
1344 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1345 move efficiently, as opposed to MOVE_MAX which is the maximum
1346 number of bytes we can move with a single instruction. */
1348 #ifndef MOVE_MAX_PIECES
1349 #define MOVE_MAX_PIECES MOVE_MAX
1352 /* Generate several move instructions to copy LEN bytes
1353 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1354 The caller must pass FROM and TO
1355 through protect_from_queue before calling.
1356 ALIGN (in bytes) is maximum alignment we can assume. */
1359 move_by_pieces (to
, from
, len
, align
)
1363 struct move_by_pieces data
;
1364 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
1365 int max_size
= MOVE_MAX_PIECES
+ 1;
1366 enum machine_mode mode
= VOIDmode
, tmode
;
1367 enum insn_code icode
;
1370 data
.to_addr
= to_addr
;
1371 data
.from_addr
= from_addr
;
1375 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1376 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1378 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1379 || GET_CODE (from_addr
) == POST_INC
1380 || GET_CODE (from_addr
) == POST_DEC
);
1382 data
.explicit_inc_from
= 0;
1383 data
.explicit_inc_to
= 0;
1385 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1386 if (data
.reverse
) data
.offset
= len
;
1389 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1390 data
.from_struct
= MEM_IN_STRUCT_P (from
);
1391 data
.to_readonly
= RTX_UNCHANGING_P (to
);
1392 data
.from_readonly
= RTX_UNCHANGING_P (from
);
1394 /* If copying requires more than two move insns,
1395 copy addresses to registers (to make displacements shorter)
1396 and use post-increment if available. */
1397 if (!(data
.autinc_from
&& data
.autinc_to
)
1398 && move_by_pieces_ninsns (len
, align
) > 2)
1400 /* Find the mode of the largest move... */
1401 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1402 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1403 if (GET_MODE_SIZE (tmode
) < max_size
)
1406 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1408 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1409 data
.autinc_from
= 1;
1410 data
.explicit_inc_from
= -1;
1412 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1414 data
.from_addr
= copy_addr_to_reg (from_addr
);
1415 data
.autinc_from
= 1;
1416 data
.explicit_inc_from
= 1;
1418 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1419 data
.from_addr
= copy_addr_to_reg (from_addr
);
1420 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1422 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1424 data
.explicit_inc_to
= -1;
1426 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1428 data
.to_addr
= copy_addr_to_reg (to_addr
);
1430 data
.explicit_inc_to
= 1;
1432 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1433 data
.to_addr
= copy_addr_to_reg (to_addr
);
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1443 while (max_size
> 1)
1445 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1446 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1447 if (GET_MODE_SIZE (tmode
) < max_size
)
1450 if (mode
== VOIDmode
)
1453 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1454 if (icode
!= CODE_FOR_nothing
1455 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1456 GET_MODE_SIZE (mode
)))
1457 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1459 max_size
= GET_MODE_SIZE (mode
);
1462 /* The code above should have handled everything. */
1467 /* Return number of insns required to move L bytes by pieces.
1468 ALIGN (in bytes) is maximum alignment we can assume. */
1471 move_by_pieces_ninsns (l
, align
)
1475 register int n_insns
= 0;
1476 int max_size
= MOVE_MAX
+ 1;
1478 if (! SLOW_UNALIGNED_ACCESS
1479 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1482 while (max_size
> 1)
1484 enum machine_mode mode
= VOIDmode
, tmode
;
1485 enum insn_code icode
;
1487 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1488 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1489 if (GET_MODE_SIZE (tmode
) < max_size
)
1492 if (mode
== VOIDmode
)
1495 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1496 if (icode
!= CODE_FOR_nothing
1497 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1498 GET_MODE_SIZE (mode
)))
1499 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1501 max_size
= GET_MODE_SIZE (mode
);
1507 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1508 with move instructions for mode MODE. GENFUN is the gen_... function
1509 to make a move insn for that mode. DATA has all the other info. */
1512 move_by_pieces_1 (genfun
, mode
, data
)
1513 rtx (*genfun
) PROTO ((rtx
, ...));
1514 enum machine_mode mode
;
1515 struct move_by_pieces
*data
;
1517 register int size
= GET_MODE_SIZE (mode
);
1518 register rtx to1
, from1
;
1520 while (data
->len
>= size
)
1522 if (data
->reverse
) data
->offset
-= size
;
1524 to1
= (data
->autinc_to
1525 ? gen_rtx_MEM (mode
, data
->to_addr
)
1526 : copy_rtx (change_address (data
->to
, mode
,
1527 plus_constant (data
->to_addr
,
1529 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
1530 RTX_UNCHANGING_P (to1
) = data
->to_readonly
;
1533 = (data
->autinc_from
1534 ? gen_rtx_MEM (mode
, data
->from_addr
)
1535 : copy_rtx (change_address (data
->from
, mode
,
1536 plus_constant (data
->from_addr
,
1538 MEM_IN_STRUCT_P (from1
) = data
->from_struct
;
1539 RTX_UNCHANGING_P (from1
) = data
->from_readonly
;
1541 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1542 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1543 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1544 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1546 emit_insn ((*genfun
) (to1
, from1
));
1547 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1548 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1549 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1550 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1552 if (! data
->reverse
) data
->offset
+= size
;
1558 /* Emit code to move a block Y to a block X.
1559 This may be done with string-move instructions,
1560 with multiple scalar move instructions, or with a library call.
1562 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1564 SIZE is an rtx that says how long they are.
1565 ALIGN is the maximum alignment we can assume they have,
1568 Return the address of the new block, if memcpy is called and returns it,
1572 emit_block_move (x
, y
, size
, align
)
1578 #ifdef TARGET_MEM_FUNCTIONS
1580 tree call_expr
, arg_list
;
1583 if (GET_MODE (x
) != BLKmode
)
1586 if (GET_MODE (y
) != BLKmode
)
1589 x
= protect_from_queue (x
, 1);
1590 y
= protect_from_queue (y
, 0);
1591 size
= protect_from_queue (size
, 0);
1593 if (GET_CODE (x
) != MEM
)
1595 if (GET_CODE (y
) != MEM
)
1600 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1601 move_by_pieces (x
, y
, INTVAL (size
), align
);
1604 /* Try the most limited insn first, because there's no point
1605 including more than one in the machine description unless
1606 the more limited one has some advantage. */
1608 rtx opalign
= GEN_INT (align
);
1609 enum machine_mode mode
;
1611 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1612 mode
= GET_MODE_WIDER_MODE (mode
))
1614 enum insn_code code
= movstr_optab
[(int) mode
];
1615 insn_operand_predicate_fn pred
;
1617 if (code
!= CODE_FOR_nothing
1618 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1619 here because if SIZE is less than the mode mask, as it is
1620 returned by the macro, it will definitely be less than the
1621 actual mode mask. */
1622 && ((GET_CODE (size
) == CONST_INT
1623 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1624 <= (GET_MODE_MASK (mode
) >> 1)))
1625 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1626 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1627 || (*pred
) (x
, BLKmode
))
1628 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1629 || (*pred
) (y
, BLKmode
))
1630 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1631 || (*pred
) (opalign
, VOIDmode
)))
1634 rtx last
= get_last_insn ();
1637 op2
= convert_to_mode (mode
, size
, 1);
1638 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1639 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1640 op2
= copy_to_mode_reg (mode
, op2
);
1642 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1649 delete_insns_since (last
);
1653 /* X, Y, or SIZE may have been passed through protect_from_queue.
1655 It is unsafe to save the value generated by protect_from_queue
1656 and reuse it later. Consider what happens if emit_queue is
1657 called before the return value from protect_from_queue is used.
1659 Expansion of the CALL_EXPR below will call emit_queue before
1660 we are finished emitting RTL for argument setup. So if we are
1661 not careful we could get the wrong value for an argument.
1663 To avoid this problem we go ahead and emit code to copy X, Y &
1664 SIZE into new pseudos. We can then place those new pseudos
1665 into an RTL_EXPR and use them later, even after a call to
1668 Note this is not strictly needed for library calls since they
1669 do not call emit_queue before loading their arguments. However,
1670 we may need to have library calls call emit_queue in the future
1671 since failing to do so could cause problems for targets which
1672 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1673 x
= copy_to_mode_reg (Pmode
, XEXP (x
, 0));
1674 y
= copy_to_mode_reg (Pmode
, XEXP (y
, 0));
1676 #ifdef TARGET_MEM_FUNCTIONS
1677 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
1679 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1680 TREE_UNSIGNED (integer_type_node
));
1681 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
1684 #ifdef TARGET_MEM_FUNCTIONS
1685 /* It is incorrect to use the libcall calling conventions to call
1686 memcpy in this context.
1688 This could be a user call to memcpy and the user may wish to
1689 examine the return value from memcpy.
1691 For targets where libcalls and normal calls have different conventions
1692 for returning pointers, we could end up generating incorrect code.
1694 So instead of using a libcall sequence we build up a suitable
1695 CALL_EXPR and expand the call in the normal fashion. */
1696 if (fn
== NULL_TREE
)
1700 /* This was copied from except.c, I don't know if all this is
1701 necessary in this context or not. */
1702 fn
= get_identifier ("memcpy");
1703 push_obstacks_nochange ();
1704 end_temporary_allocation ();
1705 fntype
= build_pointer_type (void_type_node
);
1706 fntype
= build_function_type (fntype
, NULL_TREE
);
1707 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
1708 ggc_add_tree_root (&fn
, 1);
1709 DECL_EXTERNAL (fn
) = 1;
1710 TREE_PUBLIC (fn
) = 1;
1711 DECL_ARTIFICIAL (fn
) = 1;
1712 make_decl_rtl (fn
, NULL_PTR
, 1);
1713 assemble_external (fn
);
1717 /* We need to make an argument list for the function call.
1719 memcpy has three arguments, the first two are void * addresses and
1720 the last is a size_t byte count for the copy. */
1722 = build_tree_list (NULL_TREE
,
1723 make_tree (build_pointer_type (void_type_node
), x
));
1724 TREE_CHAIN (arg_list
)
1725 = build_tree_list (NULL_TREE
,
1726 make_tree (build_pointer_type (void_type_node
), y
));
1727 TREE_CHAIN (TREE_CHAIN (arg_list
))
1728 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
1729 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
1731 /* Now we have to build up the CALL_EXPR itself. */
1732 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1733 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1734 call_expr
, arg_list
, NULL_TREE
);
1735 TREE_SIDE_EFFECTS (call_expr
) = 1;
1737 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1739 emit_library_call (bcopy_libfunc
, 0,
1740 VOIDmode
, 3, y
, Pmode
, x
, Pmode
,
1741 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1742 TREE_UNSIGNED (integer_type_node
)),
1743 TYPE_MODE (integer_type_node
));
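#if 0
/* Illustrative sketch only (added for exposition, not compiled): copying an
   8-byte BLKmode object.  DST_MEM and SRC_MEM are hypothetical BLKmode MEM
   rtx values that have already been passed through protect_from_queue, and
   4 is the alignment in bytes we can assume for both.  */
emit_block_move (dst_mem, src_mem, GEN_INT (8), 4);
#endif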
1750 /* Copy all or part of a value X into registers starting at REGNO.
1751 The number of registers to be filled is NREGS. */
1754 move_block_to_reg (regno
, x
, nregs
, mode
)
1758 enum machine_mode mode
;
1761 #ifdef HAVE_load_multiple
1769 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1770 x
= validize_mem (force_const_mem (mode
, x
));
1772 /* See if the machine can do this with a load multiple insn. */
1773 #ifdef HAVE_load_multiple
1774 if (HAVE_load_multiple
)
1776 last
= get_last_insn ();
1777 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1785 delete_insns_since (last
);
1789 for (i
= 0; i
< nregs
; i
++)
1790 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1791 operand_subword_force (x
, i
, mode
));
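#if 0
/* Illustrative sketch only (added for exposition, not compiled): loading a
   two-word BLKmode argument X_MEM into the consecutive hard registers
   REGNO and REGNO + 1, as a caller setting up a register-passed aggregate
   might.  */
move_block_to_reg (regno, x_mem, 2, BLKmode);
#endif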
1794 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1795 The number of registers to be filled is NREGS. SIZE indicates the number
1796 of bytes in the object X. */
1800 move_block_from_reg (regno
, x
, nregs
, size
)
1807 #ifdef HAVE_store_multiple
1811 enum machine_mode mode
;
1813 /* If SIZE is that of a mode no bigger than a word, just use that
1814 mode's store operation. */
1815 if (size
<= UNITS_PER_WORD
1816 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
1818 emit_move_insn (change_address (x
, mode
, NULL
),
1819 gen_rtx_REG (mode
, regno
));
1823 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1824 to the left before storing to memory. Note that the previous test
1825 doesn't handle all cases (e.g. SIZE == 3). */
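/* Worked example (added for illustration): with 4-byte words and
   SIZE == 3, the shift below moves the value left by (4 - 3) * 8 = 8 bits,
   so the three significant bytes land in the low-addressed bytes of the
   word that is stored to memory on a big-endian target.  */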
1826 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1828 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1834 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1835 gen_rtx_REG (word_mode
, regno
),
1836 build_int_2 ((UNITS_PER_WORD
- size
)
1837 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1838 emit_move_insn (tem
, shift
);
1842 /* See if the machine can do this with a store multiple insn. */
1843 #ifdef HAVE_store_multiple
1844 if (HAVE_store_multiple
)
1846 last
= get_last_insn ();
1847 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1855 delete_insns_since (last
);
1859 for (i
= 0; i
< nregs
; i
++)
1861 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1866 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1870 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1871 registers represented by a PARALLEL. SSIZE represents the total size of
1872 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1874 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1875 the balance will be in what would be the low-order memory addresses, i.e.
1876 left justified for big endian, right justified for little endian. This
1877 happens to be true for the targets currently using this support. If this
1878 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1882 emit_group_load (dst
, orig_src
, ssize
, align
)
1889 if (GET_CODE (dst
) != PARALLEL
)
1892 /* Check for a NULL entry, used to indicate that the parameter goes
1893 both on the stack and in registers. */
1894 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1899 tmps
= (rtx
*) alloca (sizeof(rtx
) * XVECLEN (dst
, 0));
1901 /* If we won't be loading directly from memory, protect the real source
1902 from strange tricks we might play. */
1904 if (GET_CODE (src
) != MEM
)
1906 src
= gen_reg_rtx (GET_MODE (orig_src
));
1907 emit_move_insn (src
, orig_src
);
1910 /* Process the pieces. */
1911 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1913 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1914 int bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1915 int bytelen
= GET_MODE_SIZE (mode
);
1918 /* Handle trailing fragments that run over the size of the struct. */
1919 if (ssize
>= 0 && bytepos
+ bytelen
> ssize
)
1921 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1922 bytelen
= ssize
- bytepos
;
1927 /* Optimize the access just a bit. */
1928 if (GET_CODE (src
) == MEM
1929 && align
*BITS_PER_UNIT
>= GET_MODE_ALIGNMENT (mode
)
1930 && bytepos
*BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1931 && bytelen
== GET_MODE_SIZE (mode
))
1933 tmps
[i
] = gen_reg_rtx (mode
);
1934 emit_move_insn (tmps
[i
],
1935 change_address (src
, mode
,
1936 plus_constant (XEXP (src
, 0),
1939 else if (GET_CODE (src
) == CONCAT
)
1942 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0))))
1943 tmps
[i
] = XEXP (src
, 0);
1944 else if (bytepos
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
1945 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 1))))
1946 tmps
[i
] = XEXP (src
, 1);
1952 tmps
[i
] = extract_bit_field (src
, bytelen
*BITS_PER_UNIT
,
1953 bytepos
*BITS_PER_UNIT
, 1, NULL_RTX
,
1954 mode
, mode
, align
, ssize
);
1957 if (BYTES_BIG_ENDIAN
&& shift
)
1959 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
1960 tmps
[i
], 0, OPTAB_WIDEN
);
1965 /* Copy the extracted pieces into the proper (probable) hard regs. */
1966 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1967 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
1970 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1971 registers represented by a PARALLEL. SSIZE represents the total size of
1972 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1975 emit_group_store (orig_dst
, src
, ssize
, align
)
1982 if (GET_CODE (src
) != PARALLEL
)
1985 /* Check for a NULL entry, used to indicate that the parameter goes
1986 both on the stack and in registers. */
1987 if (XEXP (XVECEXP (src
, 0, 0), 0))
1992 tmps
= (rtx
*) alloca (sizeof(rtx
) * XVECLEN (src
, 0));
1994 /* Copy the (probable) hard regs into pseudos. */
1995 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1997 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1998 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1999 emit_move_insn (tmps
[i
], reg
);
2003 /* If we won't be storing directly into memory, protect the real destination
2004 from strange tricks we might play. */
2006 if (GET_CODE (dst
) == PARALLEL
)
2010 /* We can get a PARALLEL dst if there is a conditional expression in
2011 a return statement. In that case, the dst and src are the same,
2012 so no action is necessary. */
2013 if (rtx_equal_p (dst
, src
))
2016 /* It is unclear if we can ever reach here, but we may as well handle
2017 it. Allocate a temporary, and split this into a store/load to/from
2020 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2021 emit_group_store (temp
, src
, ssize
, align
);
2022 emit_group_load (dst
, temp
, ssize
, align
);
2025 else if (GET_CODE (dst
) != MEM
)
2027 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2028 /* Make life a bit easier for combine. */
2029 emit_move_insn (dst
, const0_rtx
);
2031 else if (! MEM_IN_STRUCT_P (dst
))
2033 /* store_bit_field requires that memory operations have
2034 mem_in_struct_p set; we might not. */
2036 dst
= copy_rtx (orig_dst
);
2037 MEM_SET_IN_STRUCT_P (dst
, 1);
2040 /* Process the pieces. */
2041 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2043 int bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2044 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2045 int bytelen
= GET_MODE_SIZE (mode
);
2047 /* Handle trailing fragments that run over the size of the struct. */
2048 if (ssize
>= 0 && bytepos
+ bytelen
> ssize
)
2050 if (BYTES_BIG_ENDIAN
)
2052 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2053 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2054 tmps
[i
], 0, OPTAB_WIDEN
);
2056 bytelen
= ssize
- bytepos
;
2059 /* Optimize the access just a bit. */
2060 if (GET_CODE (dst
) == MEM
2061 && align
*BITS_PER_UNIT
>= GET_MODE_ALIGNMENT (mode
)
2062 && bytepos
*BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2063 && bytelen
== GET_MODE_SIZE (mode
))
2065 emit_move_insn (change_address (dst
, mode
,
2066 plus_constant (XEXP (dst
, 0),
2072 store_bit_field (dst
, bytelen
*BITS_PER_UNIT
, bytepos
*BITS_PER_UNIT
,
2073 mode
, tmps
[i
], align
, ssize
);
2078 /* Copy from the pseudo into the (probable) hard reg. */
2079 if (GET_CODE (dst
) == REG
)
2080 emit_move_insn (orig_dst
, dst
);
2083 /* Generate code to copy a BLKmode object of TYPE out of a
2084 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2085 is null, a stack temporary is created. TGTBLK is returned.
2087 The primary purpose of this routine is to handle functions
2088 that return BLKmode structures in registers. Some machines
2089 (the PA for example) want to return all small structures
2090 in registers regardless of the structure's alignment.
2094 copy_blkmode_from_reg(tgtblk
,srcreg
,type
)
2099 int bytes
= int_size_in_bytes (type
);
2100 rtx src
= NULL
, dst
= NULL
;
2101 int bitsize
= MIN (TYPE_ALIGN (type
), (unsigned int) BITS_PER_WORD
);
2102 int bitpos
, xbitpos
, big_endian_correction
= 0;
2106 tgtblk
= assign_stack_temp (BLKmode
, bytes
, 0);
2107 MEM_SET_IN_STRUCT_P (tgtblk
, AGGREGATE_TYPE_P (type
));
2108 preserve_temp_slots (tgtblk
);
2111 /* This code assumes srcreg is at least a full word. If it isn't,
2112 copy it into a new pseudo which is a full word. */
2113 if (GET_MODE (srcreg
) != BLKmode
2114 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2115 srcreg
= convert_to_mode (word_mode
, srcreg
,
2116 TREE_UNSIGNED (type
));
2118 /* Structures whose size is not a multiple of a word are aligned
2119 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2120 machine, this means we must skip the empty high order bytes when
2121 calculating the bit offset. */
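/* Worked example (added for illustration): on a 32-bit big-endian target a
   3-byte structure occupies only the low-order 24 bits of its register
   word, so the correction computed below is 32 - 3 * 8 = 8 and the first
   extraction starts at bit 8 rather than bit 0.  */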
2122 if (BYTES_BIG_ENDIAN
&& bytes
% UNITS_PER_WORD
)
2123 big_endian_correction
= (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
)
2126 /* Copy the structure BITSIZE bits at a time.
2128 We could probably emit more efficient code for machines
2129 which do not use strict alignment, but it doesn't seem
2130 worth the effort at the current time. */
2131 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2132 bitpos
< bytes
* BITS_PER_UNIT
;
2133 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2136 /* We need a new source operand each time xbitpos is on a
2137 word boundary and when xbitpos == big_endian_correction
2138 (the first time through). */
2139 if (xbitpos
% BITS_PER_WORD
== 0
2140 || xbitpos
== big_endian_correction
)
2141 src
= operand_subword_force (srcreg
,
2142 xbitpos
/ BITS_PER_WORD
,
2145 /* We need a new destination operand each time bitpos is on
2147 if (bitpos
% BITS_PER_WORD
== 0)
2148 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2150 /* Use xbitpos for the source extraction (right justified) and
2151 bitpos for the destination store (left justified). */
2152 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2153 extract_bit_field (src
, bitsize
,
2154 xbitpos
% BITS_PER_WORD
, 1,
2155 NULL_RTX
, word_mode
,
2157 bitsize
/ BITS_PER_UNIT
,
2159 bitsize
/ BITS_PER_UNIT
, BITS_PER_WORD
);
2165 /* Add a USE expression for REG to the (possibly empty) list pointed
2166 to by CALL_FUSAGE. REG must denote a hard register. */
2169 use_reg (call_fusage
, reg
)
2170 rtx
*call_fusage
, reg
;
2172 if (GET_CODE (reg
) != REG
2173 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2177 = gen_rtx_EXPR_LIST (VOIDmode
,
2178 gen_rtx_USE (VOIDmode
, reg
), *call_fusage
);
2181 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2182 starting at REGNO. All of these registers must be hard registers. */
2185 use_regs (call_fusage
, regno
, nregs
)
2192 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
2195 for (i
= 0; i
< nregs
; i
++)
2196 use_reg (call_fusage
, gen_rtx_REG (reg_raw_mode
[regno
+ i
], regno
+ i
));
2199 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2200 PARALLEL REGS. This is for calls that pass values in multiple
2201 non-contiguous locations. The Irix 6 ABI has examples of this. */
2204 use_group_regs (call_fusage
, regs
)
2210 for (i
= 0; i
< XVECLEN (regs
, 0); i
++)
2212 rtx reg
= XEXP (XVECEXP (regs
, 0, i
), 0);
2214 /* A NULL entry means the parameter goes both on the stack and in
2215 registers. This can also be a MEM for targets that pass values
2216 partially on the stack and partially in registers. */
2217 if (reg
!= 0 && GET_CODE (reg
) == REG
)
2218 use_reg (call_fusage
, reg
);
2222 /* Generate several move instructions to clear LEN bytes of block TO.
2223 (A MEM rtx with BLKmode). The caller must pass TO through
2224 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2228 clear_by_pieces (to
, len
, align
)
2232 struct clear_by_pieces data
;
2233 rtx to_addr
= XEXP (to
, 0);
2234 int max_size
= MOVE_MAX_PIECES
+ 1;
2235 enum machine_mode mode
= VOIDmode
, tmode
;
2236 enum insn_code icode
;
2239 data
.to_addr
= to_addr
;
2242 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2243 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2245 data
.explicit_inc_to
= 0;
2247 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2248 if (data
.reverse
) data
.offset
= len
;
2251 data
.to_struct
= MEM_IN_STRUCT_P (to
);
2253 /* If copying requires more than two move insns,
2254 copy addresses to registers (to make displacements shorter)
2255 and use post-increment if available. */
2257 && move_by_pieces_ninsns (len
, align
) > 2)
2259 /* Determine the main mode we'll be using */
2260 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2261 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2262 if (GET_MODE_SIZE (tmode
) < max_size
)
2265 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
2267 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
2269 data
.explicit_inc_to
= -1;
2271 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
2273 data
.to_addr
= copy_addr_to_reg (to_addr
);
2275 data
.explicit_inc_to
= 1;
2277 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
2278 data
.to_addr
= copy_addr_to_reg (to_addr
);
2281 if (! SLOW_UNALIGNED_ACCESS
2282 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
2285 /* First move what we can in the largest integer mode, then go to
2286 successively smaller modes. */
2288 while (max_size
> 1)
2290 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2291 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2292 if (GET_MODE_SIZE (tmode
) < max_size
)
2295 if (mode
== VOIDmode
)
2298 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2299 if (icode
!= CODE_FOR_nothing
2300 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
2301 GET_MODE_SIZE (mode
)))
2302 clear_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
2304 max_size
= GET_MODE_SIZE (mode
);
2307 /* The code above should have handled everything. */
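/* A worked illustration (not from the original source; the sizes are
   assumed): with MOVE_MAX_PIECES == 4 on a 32-bit target, clearing 7
   well-aligned bytes with the loops above emits one SImode store for
   bytes 0-3, one HImode store for bytes 4-5 and one QImode store for
   byte 6, as max_size shrinks past each narrower integer mode.  */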
2312 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2313 with move instructions for mode MODE. GENFUN is the gen_... function
2314 to make a move insn for that mode. DATA has all the other info. */
2317 clear_by_pieces_1 (genfun
, mode
, data
)
2318 rtx (*genfun
) PROTO ((rtx
, ...));
2319 enum machine_mode mode
;
2320 struct clear_by_pieces
*data
;
2322 register int size
= GET_MODE_SIZE (mode
);
2325 while (data
->len
>= size
)
2327 if (data
->reverse
) data
->offset
-= size
;
2329 to1
= (data
->autinc_to
2330 ? gen_rtx_MEM (mode
, data
->to_addr
)
2331 : copy_rtx (change_address (data
->to
, mode
,
2332 plus_constant (data
->to_addr
,
2334 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
2336 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2337 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
2339 emit_insn ((*genfun
) (to1
, const0_rtx
));
2340 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2341 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2343 if (! data
->reverse
) data
->offset
+= size
;
2349 /* Write zeros through the storage of OBJECT.
2350 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2351 the maximum alignment it can be assumed to have, measured in bytes.
2353 If we call a function that returns the length of the block, return it. */
2356 clear_storage (object
, size
, align
)
2361 #ifdef TARGET_MEM_FUNCTIONS
2363 tree call_expr
, arg_list
;
2367 if (GET_MODE (object
) == BLKmode
)
2369 object
= protect_from_queue (object
, 1);
2370 size
= protect_from_queue (size
, 0);
2372 if (GET_CODE (size
) == CONST_INT
2373 && MOVE_BY_PIECES_P (INTVAL (size
), align
))
2374 clear_by_pieces (object
, INTVAL (size
), align
);
2378 /* Try the most limited insn first, because there's no point
2379 including more than one in the machine description unless
2380 the more limited one has some advantage. */
2382 rtx opalign
= GEN_INT (align
);
2383 enum machine_mode mode
;
2385 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2386 mode
= GET_MODE_WIDER_MODE (mode
))
2388 enum insn_code code
= clrstr_optab
[(int) mode
];
2389 insn_operand_predicate_fn pred
;
2391 if (code
!= CODE_FOR_nothing
2392 /* We don't need MODE to be narrower than
2393 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2394 the mode mask, as it is returned by the macro, it will
2395 definitely be less than the actual mode mask. */
2396 && ((GET_CODE (size
) == CONST_INT
2397 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2398 <= (GET_MODE_MASK (mode
) >> 1)))
2399 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2400 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2401 || (*pred
) (object
, BLKmode
))
2402 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2403 || (*pred
) (opalign
, VOIDmode
)))
2406 rtx last
= get_last_insn ();
2409 op1
= convert_to_mode (mode
, size
, 1);
2410 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2411 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2412 op1
= copy_to_mode_reg (mode
, op1
);
2414 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2421 delete_insns_since (last
);
2425 /* OBJECT or SIZE may have been passed through protect_from_queue.
2427 It is unsafe to save the value generated by protect_from_queue
2428 and reuse it later. Consider what happens if emit_queue is
2429 called before the return value from protect_from_queue is used.
2431 Expansion of the CALL_EXPR below will call emit_queue before
2432 we are finished emitting RTL for argument setup. So if we are
2433 not careful we could get the wrong value for an argument.
2435 To avoid this problem we go ahead and emit code to copy OBJECT
2436 and SIZE into new pseudos. We can then place those new pseudos
2437 into an RTL_EXPR and use them later, even after a call to
2440 Note this is not strictly needed for library calls since they
2441 do not call emit_queue before loading their arguments. However,
2442 we may need to have library calls call emit_queue in the future
2443 since failing to do so could cause problems for targets which
2444 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2445 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2447 #ifdef TARGET_MEM_FUNCTIONS
2448 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
2450 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
2451 TREE_UNSIGNED (integer_type_node
));
2452 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
2456 #ifdef TARGET_MEM_FUNCTIONS
2457 /* It is incorrect to use the libcall calling conventions to call
2458 memset in this context.
2460 This could be a user call to memset and the user may wish to
2461 examine the return value from memset.
2463 For targets where libcalls and normal calls have different
2464 conventions for returning pointers, we could end up generating
2467 So instead of using a libcall sequence we build up a suitable
2468 CALL_EXPR and expand the call in the normal fashion. */
2469 if (fn
== NULL_TREE
)
2473 /* This was copied from except.c, I don't know if all this is
2474 necessary in this context or not. */
2475 fn
= get_identifier ("memset");
2476 push_obstacks_nochange ();
2477 end_temporary_allocation ();
2478 fntype
= build_pointer_type (void_type_node
);
2479 fntype
= build_function_type (fntype
, NULL_TREE
);
2480 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
2481 ggc_add_tree_root (&fn
, 1);
2482 DECL_EXTERNAL (fn
) = 1;
2483 TREE_PUBLIC (fn
) = 1;
2484 DECL_ARTIFICIAL (fn
) = 1;
2485 make_decl_rtl (fn
, NULL_PTR
, 1);
2486 assemble_external (fn
);
2490 /* We need to make an argument list for the function call.
2492 memset has three arguments: the first is a void * address, the
2493 second an integer with the initialization value, and the last a
2494 size_t byte count for the copy. */
2496 = build_tree_list (NULL_TREE
,
2497 make_tree (build_pointer_type (void_type_node
),
2499 TREE_CHAIN (arg_list
)
2500 = build_tree_list (NULL_TREE
,
2501 make_tree (integer_type_node
, const0_rtx
));
2502 TREE_CHAIN (TREE_CHAIN (arg_list
))
2503 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
2504 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
2506 /* Now we have to build up the CALL_EXPR itself. */
2507 call_expr
= build1 (ADDR_EXPR
,
2508 build_pointer_type (TREE_TYPE (fn
)), fn
);
2509 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2510 call_expr
, arg_list
, NULL_TREE
);
2511 TREE_SIDE_EFFECTS (call_expr
) = 1;
2513 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2515 emit_library_call (bzero_libfunc
, 0,
2516 VOIDmode
, 2, object
, Pmode
, size
,
2517 TYPE_MODE (integer_type_node
));
2522 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
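/* A hedged usage sketch (not from the original source; OBJECT and the
   size are hypothetical): clear_storage thus tries clear_by_pieces for
   small constant sizes, then a clrstrM expander if the target provides
   one, and finally a real call to memset (or bzero).  A typical use
   for a BLKmode MEM of known size would be:  */
#if 0
  clear_storage (object, GEN_INT (32), UNITS_PER_WORD);
#endif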
2527 /* Generate code to copy Y into X.
2528 Both Y and X must have the same mode, except that
2529 Y can be a constant with VOIDmode.
2530 This mode cannot be BLKmode; use emit_block_move for that.
2532 Return the last instruction emitted. */
2535 emit_move_insn (x
, y
)
2538 enum machine_mode mode
= GET_MODE (x
);
2540 x
= protect_from_queue (x
, 1);
2541 y
= protect_from_queue (y
, 0);
2543 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2546 /* Never force constant_p_rtx to memory. */
2547 if (GET_CODE (y
) == CONSTANT_P_RTX
)
2549 else if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
2550 y
= force_const_mem (mode
, y
);
2552 /* If X or Y are memory references, verify that their addresses are valid
2554 if (GET_CODE (x
) == MEM
2555 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2556 && ! push_operand (x
, GET_MODE (x
)))
2558 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2559 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
2561 if (GET_CODE (y
) == MEM
2562 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2564 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2565 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
2567 if (mode
== BLKmode
)
2570 return emit_move_insn_1 (x
, y
);
2573 /* Low level part of emit_move_insn.
2574 Called just like emit_move_insn, but assumes X and Y
2575 are basically valid. */
2578 emit_move_insn_1 (x
, y
)
2581 enum machine_mode mode
= GET_MODE (x
);
2582 enum machine_mode submode
;
2583 enum mode_class
class = GET_MODE_CLASS (mode
);
2586 if (mode
>= MAX_MACHINE_MODE
)
2589 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2591 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2593 /* Expand complex moves by moving real part and imag part, if possible. */
2594 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2595 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2597 (class == MODE_COMPLEX_INT
2598 ? MODE_INT
: MODE_FLOAT
),
2600 && (mov_optab
->handlers
[(int) submode
].insn_code
2601 != CODE_FOR_nothing
))
2603 /* Don't split destination if it is a stack push. */
2604 int stack
= push_operand (x
, GET_MODE (x
));
2606 /* If this is a stack, push the highpart first, so it
2607 will be in the argument order.
2609 In that case, change_address is used only to convert
2610 the mode, not to change the address. */
2613 /* Note that the real part always precedes the imag part in memory
2614 regardless of machine's endianness. */
2615 #ifdef STACK_GROWS_DOWNWARD
2616 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2617 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2618 gen_imagpart (submode
, y
)));
2619 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2620 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2621 gen_realpart (submode
, y
)));
2623 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2624 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2625 gen_realpart (submode
, y
)));
2626 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2627 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2628 gen_imagpart (submode
, y
)));
2633 /* If this is a complex value with each part being smaller than a
2634 word, the usual calling sequence will likely pack the pieces into
2635 a single register. Unfortunately, SUBREG of hard registers only
2636 deals in terms of words, so we have a problem converting input
2637 arguments to the CONCAT of two registers that is used elsewhere
2638 for complex values. If this is before reload, we can copy it into
2639 memory and reload. FIXME, we should see about using extract and
2640 insert on integer registers, but complex short and complex char
2641 variables should be rarely used. */
2642 if (GET_MODE_BITSIZE (mode
) < 2*BITS_PER_WORD
2643 && (reload_in_progress
| reload_completed
) == 0)
2645 int packed_dest_p
= (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2646 int packed_src_p
= (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2648 if (packed_dest_p
|| packed_src_p
)
2650 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2651 ? MODE_FLOAT
: MODE_INT
);
2653 enum machine_mode reg_mode
=
2654 mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2656 if (reg_mode
!= BLKmode
)
2658 rtx mem
= assign_stack_temp (reg_mode
,
2659 GET_MODE_SIZE (mode
), 0);
2661 rtx cmem
= change_address (mem
, mode
, NULL_RTX
);
2663 current_function
->cannot_inline
2664 = "function uses short complex types";
2668 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2669 emit_move_insn_1 (cmem
, y
);
2670 return emit_move_insn_1 (sreg
, mem
);
2674 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2675 emit_move_insn_1 (mem
, sreg
);
2676 return emit_move_insn_1 (x
, cmem
);
2682 /* Show the output dies here. This is necessary for pseudos;
2683 hard regs shouldn't appear here except as return values.
2684 We never want to emit such a clobber after reload. */
2686 && ! (reload_in_progress
|| reload_completed
))
2688 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2691 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2692 (gen_realpart (submode
, x
), gen_realpart (submode
, y
)));
2693 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2694 (gen_imagpart (submode
, x
), gen_imagpart (submode
, y
)));
2697 return get_last_insn ();
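/* An illustration (not from the original source): the branch above
   moves a complex value as two moves of SUBMODE, one for the real part
   and one for the imaginary part -- e.g. a DCmode copy becomes two
   DFmode moves via gen_realpart/gen_imagpart.  For a push on a
   downward-growing stack the imaginary part is pushed first, so that
   the real part ends up at the lower address regardless of
   endianness.  */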
2700 /* This will handle any multi-word mode that lacks a move_insn pattern.
2701 However, you will get better code if you define such patterns,
2702 even if they must turn into multiple assembler instructions. */
2703 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2707 #ifdef PUSH_ROUNDING
2709 /* If X is a push on the stack, do the push now and replace
2710 X with a reference to the stack pointer. */
2711 if (push_operand (x
, GET_MODE (x
)))
2713 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2714 x
= change_address (x
, VOIDmode
, stack_pointer_rtx
);
2718 /* Show the output dies here. This is necessary for pseudos;
2719 hard regs shouldn't appear here except as return values.
2720 We never want to emit such a clobber after reload. */
2722 && ! (reload_in_progress
|| reload_completed
))
2724 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2728 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2731 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2732 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2734 /* If we can't get a part of Y, put Y into memory if it is a
2735 constant. Otherwise, force it into a register. If we still
2736 can't get a part of Y, abort. */
2737 if (ypart
== 0 && CONSTANT_P (y
))
2739 y
= force_const_mem (mode
, y
);
2740 ypart
= operand_subword (y
, i
, 1, mode
);
2742 else if (ypart
== 0)
2743 ypart
= operand_subword_force (y
, i
, mode
);
2745 if (xpart
== 0 || ypart
== 0)
2748 last_insn
= emit_move_insn (xpart
, ypart
);
2757 /* Pushing data onto the stack. */
2759 /* Push a block of length SIZE (perhaps variable)
2760 and return an rtx to address the beginning of the block.
2761 Note that it is not possible for the value returned to be a QUEUED.
2762 The value may be virtual_outgoing_args_rtx.
2764 EXTRA is the number of bytes of padding to push in addition to SIZE.
2765 BELOW nonzero means this padding comes at low addresses;
2766 otherwise, the padding comes at high addresses. */
2769 push_block (size
, extra
, below
)
2775 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
2776 if (CONSTANT_P (size
))
2777 anti_adjust_stack (plus_constant (size
, extra
));
2778 else if (GET_CODE (size
) == REG
&& extra
== 0)
2779 anti_adjust_stack (size
);
2782 rtx temp
= copy_to_mode_reg (Pmode
, size
);
2784 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
2785 temp
, 0, OPTAB_LIB_WIDEN
);
2786 anti_adjust_stack (temp
);
2789 #if defined (STACK_GROWS_DOWNWARD) \
2790 || (defined (ARGS_GROW_DOWNWARD) \
2791 && !defined (ACCUMULATE_OUTGOING_ARGS))
2793 /* Return the lowest stack address when STACK or ARGS grow downward and
2794 we are not accumulating outgoing arguments (the c4x port uses such conventions). */
2796 temp
= virtual_outgoing_args_rtx
;
2797 if (extra
!= 0 && below
)
2798 temp
= plus_constant (temp
, extra
);
2800 if (GET_CODE (size
) == CONST_INT
)
2801 temp
= plus_constant (virtual_outgoing_args_rtx
,
2802 - INTVAL (size
) - (below
? 0 : extra
));
2803 else if (extra
!= 0 && !below
)
2804 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
2805 negate_rtx (Pmode
, plus_constant (size
, extra
)));
2807 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
2808 negate_rtx (Pmode
, size
));
2811 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
2817 return gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
2820 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2821 block of SIZE bytes. */
2824 get_push_address (size
)
2829 if (STACK_PUSH_CODE
== POST_DEC
)
2830 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
2831 else if (STACK_PUSH_CODE
== POST_INC
)
2832 temp
= gen_rtx_MINUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
2834 temp
= stack_pointer_rtx
;
2836 return copy_to_reg (temp
);
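/* An illustration (not from the original source): get_push_address
   compensates for the post-modify push codes.  After a POST_DEC push
   the address of the block is computed as sp + SIZE; after POST_INC as
   sp - SIZE; with a pre-modify push code the stack pointer itself
   already addresses the block and is simply copied into a pseudo.  */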
2839 /* Generate code to push X onto the stack, assuming it has mode MODE and
2841 MODE is redundant except when X is a CONST_INT (since they don't
2843 SIZE is an rtx for the size of data to be copied (in bytes),
2844 needed only if X is BLKmode.
2846 ALIGN (in bytes) is the maximum alignment we can assume.
2848 If PARTIAL and REG are both nonzero, then copy that many of the first
2849 words of X into registers starting with REG, and push the rest of X.
2850 The amount of space pushed is decreased by PARTIAL words,
2851 rounded *down* to a multiple of PARM_BOUNDARY.
2852 REG must be a hard register in this case.
2853 If REG is zero but PARTIAL is not, take all other actions for an
2854 argument partially in registers, but do not actually load any registers.
2857 EXTRA is the amount in bytes of extra space to leave next to this arg.
2858 This is ignored if an argument block has already been allocated.
2860 On a machine that lacks real push insns, ARGS_ADDR is the address of
2861 the bottom of the argument block for this call. We use indexing off there
2862 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2863 argument block has not been preallocated.
2865 ARGS_SO_FAR is the size of args previously pushed for this call.
2867 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2868 for arguments passed in registers. If nonzero, it will be the number
2869 of bytes required. */
2872 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
2873 args_addr
, args_so_far
, reg_parm_stack_space
)
2875 enum machine_mode mode
;
2884 int reg_parm_stack_space
;
2887 enum direction stack_direction
2888 #ifdef STACK_GROWS_DOWNWARD
2894 /* Decide where to pad the argument: `downward' for below,
2895 `upward' for above, or `none' for don't pad it.
2896 Default is below for small data on big-endian machines; else above. */
2897 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
2899 /* Invert direction if stack is post-update. */
2900 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
2901 if (where_pad
!= none
)
2902 where_pad
= (where_pad
== downward
? upward
: downward
);
2904 xinner
= x
= protect_from_queue (x
, 0);
2906 if (mode
== BLKmode
)
2908 /* Copy a block into the stack, entirely or partially. */
2911 int used
= partial
* UNITS_PER_WORD
;
2912 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
2920 /* USED is now the # of bytes we need not copy to the stack
2921 because registers will take care of them. */
2924 xinner
= change_address (xinner
, BLKmode
,
2925 plus_constant (XEXP (xinner
, 0), used
));
2927 /* If the partial register-part of the arg counts in its stack size,
2928 skip the part of stack space corresponding to the registers.
2929 Otherwise, start copying to the beginning of the stack space,
2930 by setting SKIP to 0. */
2931 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
2933 #ifdef PUSH_ROUNDING
2934 /* Do it with several push insns if that doesn't take lots of insns
2935 and if there is no difficulty with push insns that skip bytes
2936 on the stack for alignment purposes. */
2938 && GET_CODE (size
) == CONST_INT
2940 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
2941 /* Here we avoid the case of a structure whose weak alignment
2942 forces many pushes of a small amount of data,
2943 and such small pushes do rounding that causes trouble. */
2944 && ((! SLOW_UNALIGNED_ACCESS
)
2945 || align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
2946 || PUSH_ROUNDING (align
) == align
)
2947 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
2949 /* Push padding now if padding above and stack grows down,
2950 or if padding below and stack grows up.
2951 But if space already allocated, this has already been done. */
2952 if (extra
&& args_addr
== 0
2953 && where_pad
!= none
&& where_pad
!= stack_direction
)
2954 anti_adjust_stack (GEN_INT (extra
));
2956 move_by_pieces (gen_rtx_MEM (BLKmode
, gen_push_operand ()), xinner
,
2957 INTVAL (size
) - used
, align
);
2959 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
2963 in_check_memory_usage
= 1;
2964 temp
= get_push_address (INTVAL(size
) - used
);
2965 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
2966 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
2968 XEXP (xinner
, 0), Pmode
,
2969 GEN_INT (INTVAL(size
) - used
),
2970 TYPE_MODE (sizetype
));
2972 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
2974 GEN_INT (INTVAL(size
) - used
),
2975 TYPE_MODE (sizetype
),
2976 GEN_INT (MEMORY_USE_RW
),
2977 TYPE_MODE (integer_type_node
));
2978 in_check_memory_usage
= 0;
2982 #endif /* PUSH_ROUNDING */
2984 /* Otherwise make space on the stack and copy the data
2985 to the address of that space. */
2987 /* Deduct words put into registers from the size we must copy. */
2990 if (GET_CODE (size
) == CONST_INT
)
2991 size
= GEN_INT (INTVAL (size
) - used
);
2993 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
2994 GEN_INT (used
), NULL_RTX
, 0,
2998 /* Get the address of the stack space.
2999 In this case, we do not deal with EXTRA separately.
3000 A single stack adjust will do. */
3003 temp
= push_block (size
, extra
, where_pad
== downward
);
3006 else if (GET_CODE (args_so_far
) == CONST_INT
)
3007 temp
= memory_address (BLKmode
,
3008 plus_constant (args_addr
,
3009 skip
+ INTVAL (args_so_far
)));
3011 temp
= memory_address (BLKmode
,
3012 plus_constant (gen_rtx_PLUS (Pmode
,
3016 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3020 in_check_memory_usage
= 1;
3021 target
= copy_to_reg (temp
);
3022 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3023 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3025 XEXP (xinner
, 0), Pmode
,
3026 size
, TYPE_MODE (sizetype
));
3028 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3030 size
, TYPE_MODE (sizetype
),
3031 GEN_INT (MEMORY_USE_RW
),
3032 TYPE_MODE (integer_type_node
));
3033 in_check_memory_usage
= 0;
3036 /* TEMP is the address of the block. Copy the data there. */
3037 if (GET_CODE (size
) == CONST_INT
3038 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
), align
)))
3040 move_by_pieces (gen_rtx_MEM (BLKmode
, temp
), xinner
,
3041 INTVAL (size
), align
);
3046 rtx opalign
= GEN_INT (align
);
3047 enum machine_mode mode
;
3048 rtx target
= gen_rtx_MEM (BLKmode
, temp
);
3050 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3052 mode
= GET_MODE_WIDER_MODE (mode
))
3054 enum insn_code code
= movstr_optab
[(int) mode
];
3055 insn_operand_predicate_fn pred
;
3057 if (code
!= CODE_FOR_nothing
3058 && ((GET_CODE (size
) == CONST_INT
3059 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3060 <= (GET_MODE_MASK (mode
) >> 1)))
3061 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
3062 && (!(pred
= insn_data
[(int) code
].operand
[0].predicate
)
3063 || ((*pred
) (target
, BLKmode
)))
3064 && (!(pred
= insn_data
[(int) code
].operand
[1].predicate
)
3065 || ((*pred
) (xinner
, BLKmode
)))
3066 && (!(pred
= insn_data
[(int) code
].operand
[3].predicate
)
3067 || ((*pred
) (opalign
, VOIDmode
))))
3069 rtx op2
= convert_to_mode (mode
, size
, 1);
3070 rtx last
= get_last_insn ();
3073 pred
= insn_data
[(int) code
].operand
[2].predicate
;
3074 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
3075 op2
= copy_to_mode_reg (mode
, op2
);
3077 pat
= GEN_FCN ((int) code
) (target
, xinner
,
3085 delete_insns_since (last
);
3090 #ifndef ACCUMULATE_OUTGOING_ARGS
3091 /* If the source is referenced relative to the stack pointer,
3092 copy it to another register to stabilize it. We do not need
3093 to do this if we know that we won't be changing sp. */
3095 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3096 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3097 temp
= copy_to_reg (temp
);
3100 /* Make inhibit_defer_pop nonzero around the library call
3101 to force it to pop the bcopy-arguments right away. */
3103 #ifdef TARGET_MEM_FUNCTIONS
3104 emit_library_call (memcpy_libfunc
, 0,
3105 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
3106 convert_to_mode (TYPE_MODE (sizetype
),
3107 size
, TREE_UNSIGNED (sizetype
)),
3108 TYPE_MODE (sizetype
));
3110 emit_library_call (bcopy_libfunc
, 0,
3111 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
3112 convert_to_mode (TYPE_MODE (integer_type_node
),
3114 TREE_UNSIGNED (integer_type_node
)),
3115 TYPE_MODE (integer_type_node
));
3120 else if (partial
> 0)
3122 /* Scalar partly in registers. */
3124 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3127 /* # words of start of argument
3128 that we must make space for but need not store. */
3129 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3130 int args_offset
= INTVAL (args_so_far
);
3133 /* Push padding now if padding above and stack grows down,
3134 or if padding below and stack grows up.
3135 But if space already allocated, this has already been done. */
3136 if (extra
&& args_addr
== 0
3137 && where_pad
!= none
&& where_pad
!= stack_direction
)
3138 anti_adjust_stack (GEN_INT (extra
));
3140 /* If we make space by pushing it, we might as well push
3141 the real data. Otherwise, we can leave OFFSET nonzero
3142 and leave the space uninitialized. */
3146 /* Now NOT_STACK gets the number of words that we don't need to
3147 allocate on the stack. */
3148 not_stack
= partial
- offset
;
3150 /* If the partial register-part of the arg counts in its stack size,
3151 skip the part of stack space corresponding to the registers.
3152 Otherwise, start copying to the beginning of the stack space,
3153 by setting SKIP to 0. */
3154 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3156 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3157 x
= validize_mem (force_const_mem (mode
, x
));
3159 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3160 SUBREGs of such registers are not allowed. */
3161 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3162 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3163 x
= copy_to_reg (x
);
3165 /* Loop over all the words allocated on the stack for this arg. */
3166 /* We can do it by words, because any scalar bigger than a word
3167 has a size a multiple of a word. */
3168 #ifndef PUSH_ARGS_REVERSED
3169 for (i
= not_stack
; i
< size
; i
++)
3171 for (i
= size
- 1; i
>= not_stack
; i
--)
3173 if (i
>= not_stack
+ offset
)
3174 emit_push_insn (operand_subword_force (x
, i
, mode
),
3175 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3177 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3179 reg_parm_stack_space
);
3184 rtx target
= NULL_RTX
;
3186 /* Push padding now if padding above and stack grows down,
3187 or if padding below and stack grows up.
3188 But if space already allocated, this has already been done. */
3189 if (extra
&& args_addr
== 0
3190 && where_pad
!= none
&& where_pad
!= stack_direction
)
3191 anti_adjust_stack (GEN_INT (extra
));
3193 #ifdef PUSH_ROUNDING
3195 addr
= gen_push_operand ();
3199 if (GET_CODE (args_so_far
) == CONST_INT
)
3201 = memory_address (mode
,
3202 plus_constant (args_addr
,
3203 INTVAL (args_so_far
)));
3205 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3210 emit_move_insn (gen_rtx_MEM (mode
, addr
), x
);
3212 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3214 in_check_memory_usage
= 1;
3216 target
= get_push_address (GET_MODE_SIZE (mode
));
3218 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3219 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3222 GEN_INT (GET_MODE_SIZE (mode
)),
3223 TYPE_MODE (sizetype
));
3225 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3227 GEN_INT (GET_MODE_SIZE (mode
)),
3228 TYPE_MODE (sizetype
),
3229 GEN_INT (MEMORY_USE_RW
),
3230 TYPE_MODE (integer_type_node
));
3231 in_check_memory_usage
= 0;
3236 /* If part should go in registers, copy that part
3237 into the appropriate registers. Do this now, at the end,
3238 since mem-to-mem copies above may do function calls. */
3239 if (partial
> 0 && reg
!= 0)
3241 /* Handle calls that pass values in multiple non-contiguous locations.
3242 The Irix 6 ABI has examples of this. */
3243 if (GET_CODE (reg
) == PARALLEL
)
3244 emit_group_load (reg
, x
, -1, align
); /* ??? size? */
3246 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3249 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3250 anti_adjust_stack (GEN_INT (extra
));
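/* A hedged usage sketch (not from the original source; VAL is a
   hypothetical word_mode rtx): pushing a plain scalar argument on a
   machine with push insns and no preallocated argument block
   (ARGS_ADDR == 0), with no part passed in registers:  */
#if 0
  emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX /* size */,
                  PARM_BOUNDARY / BITS_PER_UNIT /* align, in bytes */,
                  0 /* partial */, NULL_RTX /* reg */, 0 /* extra */,
                  NULL_RTX /* args_addr */, const0_rtx /* args_so_far */,
                  0 /* reg_parm_stack_space */);
#endif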
3253 /* Expand an assignment that stores the value of FROM into TO.
3254 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3255 (This may contain a QUEUED rtx;
3256 if the value is constant, this rtx is a constant.)
3257 Otherwise, the returned value is NULL_RTX.
3259 SUGGEST_REG is no longer actually used.
3260 It used to mean, copy the value through a register
3261 and return that register, if that is possible.
3262 We now use WANT_VALUE to decide whether to do this. */
3265 expand_assignment (to
, from
, want_value
, suggest_reg
)
3268 int suggest_reg ATTRIBUTE_UNUSED
;
3270 register rtx to_rtx
= 0;
3273 /* Don't crash if the lhs of the assignment was erroneous. */
3275 if (TREE_CODE (to
) == ERROR_MARK
)
3277 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3278 return want_value
? result
: NULL_RTX
;
3281 /* Assignment of a structure component needs special treatment
3282 if the structure component's rtx is not simply a MEM.
3283 Assignment of an array element at a constant index, and assignment of
3284 an array element in an unaligned packed structure field, has the same
3287 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3288 || TREE_CODE (to
) == ARRAY_REF
)
3290 enum machine_mode mode1
;
3300 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3301 &unsignedp
, &volatilep
, &alignment
);
3303 /* If we are going to use store_bit_field and extract_bit_field,
3304 make sure to_rtx will be safe for multiple use. */
3306 if (mode1
== VOIDmode
&& want_value
)
3307 tem
= stabilize_reference (tem
);
3309 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_DONT
);
3312 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3314 if (GET_CODE (to_rtx
) != MEM
)
3317 if (GET_MODE (offset_rtx
) != ptr_mode
)
3319 #ifdef POINTERS_EXTEND_UNSIGNED
3320 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
3322 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3326 /* A constant address in TO_RTX can have VOIDmode, we must not try
3327 to call force_reg for that case. Avoid that case. */
3328 if (GET_CODE (to_rtx
) == MEM
3329 && GET_MODE (to_rtx
) == BLKmode
3330 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3332 && (bitpos
% bitsize
) == 0
3333 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3334 && (alignment
* BITS_PER_UNIT
) == GET_MODE_ALIGNMENT (mode1
))
3336 rtx temp
= change_address (to_rtx
, mode1
,
3337 plus_constant (XEXP (to_rtx
, 0),
3340 if (GET_CODE (XEXP (temp
, 0)) == REG
)
3343 to_rtx
= change_address (to_rtx
, mode1
,
3344 force_reg (GET_MODE (XEXP (temp
, 0)),
3349 to_rtx
= change_address (to_rtx
, VOIDmode
,
3350 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
3351 force_reg (ptr_mode
,
3357 if (GET_CODE (to_rtx
) == MEM
)
3359 /* When the offset is zero, to_rtx is the address of the
3360 structure we are storing into, and hence may be shared.
3361 We must make a new MEM before setting the volatile bit. */
3363 to_rtx
= copy_rtx (to_rtx
);
3365 MEM_VOLATILE_P (to_rtx
) = 1;
3367 #if 0 /* This was turned off because, when a field is volatile
3368 in an object which is not volatile, the object may be in a register,
3369 and then we would abort over here. */
3375 if (TREE_CODE (to
) == COMPONENT_REF
3376 && TREE_READONLY (TREE_OPERAND (to
, 1)))
3379 to_rtx
= copy_rtx (to_rtx
);
3381 RTX_UNCHANGING_P (to_rtx
) = 1;
3384 /* Check the access. */
3385 if (current_function_check_memory_usage
&& GET_CODE (to_rtx
) == MEM
)
3390 enum machine_mode best_mode
;
3392 best_mode
= get_best_mode (bitsize
, bitpos
,
3393 TYPE_ALIGN (TREE_TYPE (tem
)),
3395 if (best_mode
== VOIDmode
)
3398 best_mode_size
= GET_MODE_BITSIZE (best_mode
);
3399 to_addr
= plus_constant (XEXP (to_rtx
, 0), (bitpos
/ BITS_PER_UNIT
));
3400 size
= CEIL ((bitpos
% best_mode_size
) + bitsize
, best_mode_size
);
3401 size
*= GET_MODE_SIZE (best_mode
);
3403 /* Check the access right of the pointer. */
3405 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3407 GEN_INT (size
), TYPE_MODE (sizetype
),
3408 GEN_INT (MEMORY_USE_WO
),
3409 TYPE_MODE (integer_type_node
));
3412 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3414 /* Spurious cast makes HPUX compiler happy. */
3415 ? (enum machine_mode
) TYPE_MODE (TREE_TYPE (to
))
3418 /* Required alignment of containing datum. */
3420 int_size_in_bytes (TREE_TYPE (tem
)),
3421 get_alias_set (to
));
3422 preserve_temp_slots (result
);
3426 /* If the value is meaningful, convert RESULT to the proper mode.
3427 Otherwise, return nothing. */
3428 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3429 TYPE_MODE (TREE_TYPE (from
)),
3431 TREE_UNSIGNED (TREE_TYPE (to
)))
3435 /* If the rhs is a function call and its value is not an aggregate,
3436 call the function before we start to compute the lhs.
3437 This is needed for correct code for cases such as
3438 val = setjmp (buf) on machines where reference to val
3439 requires loading up part of an address in a separate insn.
3441 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3442 a promoted variable where the zero- or sign- extension needs to be done.
3443 Handling this in the normal way is safe because no computation is done
3445 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
3446 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3447 && ! (TREE_CODE (to
) == VAR_DECL
&& GET_CODE (DECL_RTL (to
)) == REG
))
3452 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3454 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3456 /* Handle calls that return values in multiple non-contiguous locations.
3457 The Irix 6 ABI has examples of this. */
3458 if (GET_CODE (to_rtx
) == PARALLEL
)
3459 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)),
3460 TYPE_ALIGN (TREE_TYPE (from
)) / BITS_PER_UNIT
);
3461 else if (GET_MODE (to_rtx
) == BLKmode
)
3462 emit_block_move (to_rtx
, value
, expr_size (from
),
3463 TYPE_ALIGN (TREE_TYPE (from
)) / BITS_PER_UNIT
);
3466 #ifdef POINTERS_EXTEND_UNSIGNED
3467 if (TREE_CODE (TREE_TYPE (to
)) == REFERENCE_TYPE
3468 || TREE_CODE (TREE_TYPE (to
)) == POINTER_TYPE
)
3469 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3471 emit_move_insn (to_rtx
, value
);
3473 preserve_temp_slots (to_rtx
);
3476 return want_value
? to_rtx
: NULL_RTX
;
3479 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3480 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3484 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3485 if (GET_CODE (to_rtx
) == MEM
)
3486 MEM_ALIAS_SET (to_rtx
) = get_alias_set (to
);
3489 /* Don't move directly into a return register. */
3490 if (TREE_CODE (to
) == RESULT_DECL
&& GET_CODE (to_rtx
) == REG
)
3495 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3496 emit_move_insn (to_rtx
, temp
);
3497 preserve_temp_slots (to_rtx
);
3500 return want_value
? to_rtx
: NULL_RTX
;
3503 /* In case we are returning the contents of an object which overlaps
3504 the place the value is being stored, use a safe function when copying
3505 a value through a pointer into a structure value return block. */
3506 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3507 && current_function_returns_struct
3508 && !current_function_returns_pcc_struct
)
3513 size
= expr_size (from
);
3514 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
,
3515 EXPAND_MEMORY_USE_DONT
);
3517 /* Copy the rights of the bitmap. */
3518 if (current_function_check_memory_usage
)
3519 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3520 XEXP (to_rtx
, 0), Pmode
,
3521 XEXP (from_rtx
, 0), Pmode
,
3522 convert_to_mode (TYPE_MODE (sizetype
),
3523 size
, TREE_UNSIGNED (sizetype
)),
3524 TYPE_MODE (sizetype
));
3526 #ifdef TARGET_MEM_FUNCTIONS
3527 emit_library_call (memcpy_libfunc
, 0,
3528 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3529 XEXP (from_rtx
, 0), Pmode
,
3530 convert_to_mode (TYPE_MODE (sizetype
),
3531 size
, TREE_UNSIGNED (sizetype
)),
3532 TYPE_MODE (sizetype
));
3534 emit_library_call (bcopy_libfunc
, 0,
3535 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3536 XEXP (to_rtx
, 0), Pmode
,
3537 convert_to_mode (TYPE_MODE (integer_type_node
),
3538 size
, TREE_UNSIGNED (integer_type_node
)),
3539 TYPE_MODE (integer_type_node
));
3542 preserve_temp_slots (to_rtx
);
3545 return want_value
? to_rtx
: NULL_RTX
;
3548 /* Compute FROM and store the value in the rtx we got. */
3551 result
= store_expr (from
, to_rtx
, want_value
);
3552 preserve_temp_slots (result
);
3555 return want_value
? result
: NULL_RTX
;
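/* A hedged usage sketch (not from the original source; DEST_TREE and
   SRC_TREE are hypothetical tree operands): expand_assignment is the
   entry point the front ends use for an assignment such as
   "dest = src;", here discarding the value of the assignment:  */
#if 0
  expand_assignment (dest_tree, src_tree, 0 /* want_value */, 0);
#endif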
3558 /* Generate code for computing expression EXP,
3559 and storing the value into TARGET.
3560 TARGET may contain a QUEUED rtx.
3562 If WANT_VALUE is nonzero, return a copy of the value
3563 not in TARGET, so that we can be sure to use the proper
3564 value in a containing expression even if TARGET has something
3565 else stored in it. If possible, we copy the value through a pseudo
3566 and return that pseudo. Or, if the value is constant, we try to
3567 return the constant. In some cases, we return a pseudo
3568 copied *from* TARGET.
3570 If the mode is BLKmode then we may return TARGET itself.
3571 It turns out that in BLKmode it doesn't cause a problem,
3572 because C has no operators that could combine two different
3573 assignments into the same BLKmode object with different values
3574 with no sequence point. Will other languages need this to be more thorough?
3577 If WANT_VALUE is 0, we return NULL, to make sure
3578 to catch quickly any cases where the caller uses the value
3579 and fails to set WANT_VALUE. */
3582 store_expr (exp
, target
, want_value
)
3584 register rtx target
;
3588 int dont_return_target
= 0;
3590 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
3592 /* Perform first part of compound expression, then assign from second
3594 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
3596 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3598 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3600 /* For conditional expression, get safe form of the target. Then
3601 test the condition, doing the appropriate assignment on either
3602 side. This avoids the creation of unnecessary temporaries.
3603 For non-BLKmode, it is more efficient not to do this. */
3605 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3608 target
= protect_from_queue (target
, 1);
3610 do_pending_stack_adjust ();
3612 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3613 start_cleanup_deferral ();
3614 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3615 end_cleanup_deferral ();
3617 emit_jump_insn (gen_jump (lab2
));
3620 start_cleanup_deferral ();
3621 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3622 end_cleanup_deferral ();
3627 return want_value
? target
: NULL_RTX
;
3629 else if (queued_subexp_p (target
))
3630 /* If target contains a postincrement, let's not risk
3631 using it as the place to generate the rhs. */
3633 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3635 /* Expand EXP into a new pseudo. */
3636 temp
= gen_reg_rtx (GET_MODE (target
));
3637 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3640 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3642 /* If target is volatile, ANSI requires accessing the value
3643 *from* the target, if it is accessed. So make that happen.
3644 In no case return the target itself. */
3645 if (! MEM_VOLATILE_P (target
) && want_value
)
3646 dont_return_target
= 1;
3648 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3649 && GET_MODE (target
) != BLKmode
)
3650 /* If target is in memory and caller wants value in a register instead,
3651 arrange that. Pass TARGET as target for expand_expr so that,
3652 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3653 We know expand_expr will not use the target in that case.
3654 Don't do this if TARGET is volatile because we are supposed
3655 to write it and then read it. */
3657 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3658 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3659 temp
= copy_to_reg (temp
);
3660 dont_return_target
= 1;
3662 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3663 /* If this is a scalar in a register that is stored in a wider mode
3664 than the declared mode, compute the result into its declared mode
3665 and then convert to the wider mode. Our value is the computed
3668 /* If we don't want a value, we can do the conversion inside EXP,
3669 which will often result in some optimizations. Do the conversion
3670 in two steps: first change the signedness, if needed, then
3671 the extend. But don't do this if the type of EXP is a subtype
3672 of something else since then the conversion might involve
3673 more than just converting modes. */
3674 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
3675 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
3677 if (TREE_UNSIGNED (TREE_TYPE (exp
))
3678 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3681 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
3685 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
3686 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3690 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3692 /* If TEMP is a volatile MEM and we want a result value, make
3693 the access now so it gets done only once. Likewise if
3694 it contains TARGET. */
3695 if (GET_CODE (temp
) == MEM
&& want_value
3696 && (MEM_VOLATILE_P (temp
)
3697 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
3698 temp
= copy_to_reg (temp
);
3700 /* If TEMP is a VOIDmode constant, use convert_modes to make
3701 sure that we properly convert it. */
3702 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3703 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3704 TYPE_MODE (TREE_TYPE (exp
)), temp
,
3705 SUBREG_PROMOTED_UNSIGNED_P (target
));
3707 convert_move (SUBREG_REG (target
), temp
,
3708 SUBREG_PROMOTED_UNSIGNED_P (target
));
3710 /* If we promoted a constant, change the mode back down to match
3711 target. Otherwise, the caller might get confused by a result whose
3712 mode is larger than expected. */
3714 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
)
3715 && GET_MODE (temp
) != VOIDmode
)
3717 temp
= gen_rtx_SUBREG (GET_MODE (target
), temp
, 0);
3718 SUBREG_PROMOTED_VAR_P (temp
) = 1;
3719 SUBREG_PROMOTED_UNSIGNED_P (temp
)
3720 = SUBREG_PROMOTED_UNSIGNED_P (target
);
3723 return want_value
? temp
: NULL_RTX
;
3727 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3728 /* Return TARGET if it's a specified hardware register.
3729 If TARGET is a volatile mem ref, either return TARGET
3730 or return a reg copied *from* TARGET; ANSI requires this.
3732 Otherwise, if TEMP is not TARGET, return TEMP
3733 if it is constant (for efficiency),
3734 or if we really want the correct value. */
3735 if (!(target
&& GET_CODE (target
) == REG
3736 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3737 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
3738 && ! rtx_equal_p (temp
, target
)
3739 && (CONSTANT_P (temp
) || want_value
))
3740 dont_return_target
= 1;
3743 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3744 the same as that of TARGET, adjust the constant. This is needed, for
3745 example, in case it is a CONST_DOUBLE and we want only a word-sized
3747 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3748 && TREE_CODE (exp
) != ERROR_MARK
3749 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3750 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3751 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
3753 if (current_function_check_memory_usage
3754 && GET_CODE (target
) == MEM
3755 && AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
3757 if (GET_CODE (temp
) == MEM
)
3758 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3759 XEXP (target
, 0), Pmode
,
3760 XEXP (temp
, 0), Pmode
,
3761 expr_size (exp
), TYPE_MODE (sizetype
));
3763 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3764 XEXP (target
, 0), Pmode
,
3765 expr_size (exp
), TYPE_MODE (sizetype
),
3766 GEN_INT (MEMORY_USE_WO
),
3767 TYPE_MODE (integer_type_node
));
3770 /* If value was not generated in the target, store it there.
3771 Convert the value to TARGET's type first if necessary.
3772 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3773 one or both of them are volatile memory refs, we have to distinguish
3775 - expand_expr has used TARGET. In this case, we must not generate
3776 another copy. This can be detected by TARGET being equal according
3778 - expand_expr has not used TARGET - that means that the source just
3779 happens to have the same RTX form. Since temp will have been created
3780 by expand_expr, it will compare unequal according to == .
3781 We must generate a copy in this case, to reach the correct number
3782 of volatile memory references. */
3784 if ((! rtx_equal_p (temp
, target
)
3785 || (temp
!= target
&& (side_effects_p (temp
)
3786 || side_effects_p (target
))))
3787 && TREE_CODE (exp
) != ERROR_MARK
)
3789 target
= protect_from_queue (target
, 1);
3790 if (GET_MODE (temp
) != GET_MODE (target
)
3791 && GET_MODE (temp
) != VOIDmode
)
3793 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
3794 if (dont_return_target
)
3796 /* In this case, we will return TEMP,
3797 so make sure it has the proper mode.
3798 But don't forget to store the value into TARGET. */
3799 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
3800 emit_move_insn (target
, temp
);
3803 convert_move (target
, temp
, unsignedp
);
3806 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
3808 /* Handle copying a string constant into an array.
3809 The string constant may be shorter than the array.
3810 So copy just the string's actual length, and clear the rest. */
3814 /* Get the size of the data type of the string,
3815 which is actually the size of the target. */
3816 size
= expr_size (exp
);
3817 if (GET_CODE (size
) == CONST_INT
3818 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
3819 emit_block_move (target
, temp
, size
,
3820 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3823 /* Compute the size of the data to copy from the string. */
3825 = size_binop (MIN_EXPR
,
3826 make_tree (sizetype
, size
),
3828 build_int_2 (TREE_STRING_LENGTH (exp
), 0)));
3829 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
3833 /* Copy that much. */
3834 emit_block_move (target
, temp
, copy_size_rtx
,
3835 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3837 /* Figure out how much is left in TARGET that we have to clear.
3838 Do all calculations in ptr_mode. */
3840 addr
= XEXP (target
, 0);
3841 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
3843 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
3845 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
3846 size
= plus_constant (size
, - TREE_STRING_LENGTH (exp
));
3850 addr
= force_reg (ptr_mode
, addr
);
3851 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
3852 copy_size_rtx
, NULL_RTX
, 0,
3855 size
= expand_binop (ptr_mode
, sub_optab
, size
,
3856 copy_size_rtx
, NULL_RTX
, 0,
3859 label
= gen_label_rtx ();
3860 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
3861 GET_MODE (size
), 0, 0, label
);
3864 if (size
!= const0_rtx
)
3866 /* Be sure we can write on ADDR. */
3867 if (current_function_check_memory_usage
)
3868 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3870 size
, TYPE_MODE (sizetype
),
3871 GEN_INT (MEMORY_USE_WO
),
3872 TYPE_MODE (integer_type_node
));
3873 #ifdef TARGET_MEM_FUNCTIONS
3874 emit_library_call (memset_libfunc
, 0, VOIDmode
, 3,
3876 const0_rtx
, TYPE_MODE (integer_type_node
),
3877 convert_to_mode (TYPE_MODE (sizetype
),
3879 TREE_UNSIGNED (sizetype
)),
3880 TYPE_MODE (sizetype
));
3882 emit_library_call (bzero_libfunc
, 0, VOIDmode
, 2,
3884 convert_to_mode (TYPE_MODE (integer_type_node
),
3886 TREE_UNSIGNED (integer_type_node
)),
3887 TYPE_MODE (integer_type_node
));
3895 /* Handle calls that return values in multiple non-contiguous locations.
3896 The Irix 6 ABI has examples of this. */
3897 else if (GET_CODE (target
) == PARALLEL
)
3898 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)),
3899 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3900 else if (GET_MODE (temp
) == BLKmode
)
3901 emit_block_move (target
, temp
, expr_size (exp
),
3902 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3904 emit_move_insn (target
, temp
);
3907 /* If we don't want a value, return NULL_RTX. */
3911 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3912 ??? The latter test doesn't seem to make sense. */
3913 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
3916 /* Return TARGET itself if it is a hard register. */
3917 else if (want_value
&& GET_MODE (target
) != BLKmode
3918 && ! (GET_CODE (target
) == REG
3919 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
3920 return copy_to_reg (target
);
3926 /* Return 1 if EXP just contains zeros. */
3934 switch (TREE_CODE (exp
))
3938 case NON_LVALUE_EXPR
:
3939 return is_zeros_p (TREE_OPERAND (exp
, 0));
3942 return TREE_INT_CST_LOW (exp
) == 0 && TREE_INT_CST_HIGH (exp
) == 0;
3946 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
3949 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
3952 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3953 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
3954 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3955 if (! is_zeros_p (TREE_VALUE (elt
)))
3965 /* Return 1 if EXP contains mostly (3/4) zeros. */
3968 mostly_zeros_p (exp
)
3971 if (TREE_CODE (exp
) == CONSTRUCTOR
)
3973 int elts
= 0, zeros
= 0;
3974 tree elt
= CONSTRUCTOR_ELTS (exp
);
3975 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3977 /* If there are no ranges of true bits, it is all zero. */
3978 return elt
== NULL_TREE
;
3980 for (; elt
; elt
= TREE_CHAIN (elt
))
3982 /* We do not handle the case where the index is a RANGE_EXPR,
3983 so the statistic will be somewhat inaccurate.
3984 We do make a more accurate count in store_constructor itself,
3985 so since this function is only used for nested array elements,
3986 this should be close enough. */
3987 if (mostly_zeros_p (TREE_VALUE (elt
)))
3992 return 4 * zeros
>= 3 * elts
;
3995 return is_zeros_p (exp
);
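/* A worked illustration (not from the original source): the 3/4 test
   above means a constructor like { 0, 0, 0, 5 } (elts == 4, zeros == 3)
   satisfies 4 * 3 >= 3 * 4, so it counts as mostly zero and
   store_constructor will clear the whole object first and then store
   only the nonzero element.  */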
3998 /* Helper function for store_constructor.
3999 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4000 TYPE is the type of the CONSTRUCTOR, not the element type.
4001 ALIGN and CLEARED are as for store_constructor.
4003 This provides a recursive shortcut back to store_constructor when it isn't
4004 necessary to go through store_field. This is so that we can pass through
4005 the cleared field to let store_constructor know that we may not have to
4006 clear a substructure if the outer structure has already been cleared. */
4009 store_constructor_field (target
, bitsize
, bitpos
,
4010 mode
, exp
, type
, align
, cleared
)
4012 int bitsize
, bitpos
;
4013 enum machine_mode mode
;
4018 if (TREE_CODE (exp
) == CONSTRUCTOR
4019 && bitpos
% BITS_PER_UNIT
== 0
4020 /* If we have a non-zero bitpos for a register target, then we just
4021 let store_field do the bitfield handling. This is unlikely to
4022 generate unnecessary clear instructions anyways. */
4023 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4026 target
= change_address (target
, VOIDmode
,
4027 plus_constant (XEXP (target
, 0),
4028 bitpos
/ BITS_PER_UNIT
));
4029 store_constructor (exp
, target
, align
, cleared
);
4032 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4033 (align
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
,
4034 int_size_in_bytes (type
), cleared
);
4037 /* Store the value of constructor EXP into the rtx TARGET.
4038 TARGET is either a REG or a MEM.
4039 ALIGN is the maximum known alignment for TARGET, in bits.
4040 CLEARED is true if TARGET is known to have been zero'd. */
4043 store_constructor (exp
, target
, align
, cleared
)
4049 tree type
= TREE_TYPE (exp
);
4050 #ifdef WORD_REGISTER_OPERATIONS
4051 rtx exp_size
= expr_size (exp
);
4054 /* We know our target cannot conflict, since safe_from_p has been called. */
4056 /* Don't try copying piece by piece into a hard register
4057 since that is vulnerable to being clobbered by EXP.
4058 Instead, construct in a pseudo register and then copy it all. */
4059 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4061 rtx temp
= gen_reg_rtx (GET_MODE (target
));
4062 store_constructor (exp
, temp
, 0);
4063 emit_move_insn (target
, temp
);
4068 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4069 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4073 /* Inform later passes that the whole union value is dead. */
4074 if (TREE_CODE (type
) == UNION_TYPE
4075 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4076 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4078 /* If we are building a static constructor into a register,
4079 set the initial value as zero so we can fold the value into
4080 a constant. But if more than one register is involved,
4081 this probably loses. */
4082 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4083 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4086 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4091 /* If the constructor has fewer fields than the structure
4092 or if we are initializing the structure to mostly zeros,
4093 clear the whole structure first. */
4094 else if ((list_length (CONSTRUCTOR_ELTS (exp
))
4095 != list_length (TYPE_FIELDS (type
)))
4096 || mostly_zeros_p (exp
))
4099 clear_storage (target
, expr_size (exp
),
4100 (align
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
);
4105 /* Inform later passes that the old value is dead. */
4106 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4108 /* Store each element of the constructor into
4109 the corresponding field of TARGET. */
4111 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4113 register tree field
= TREE_PURPOSE (elt
);
4114 #ifdef WORD_REGISTER_OPERATIONS
4115 tree value
= TREE_VALUE (elt
);
4117 register enum machine_mode mode
;
4121 tree pos
, constant
= 0, offset
= 0;
4122 rtx to_rtx
= target
;
4124 /* Just ignore missing fields.
4125 We cleared the whole structure, above,
4126 if any fields are missing. */
4130 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
4133 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
4134 unsignedp
= TREE_UNSIGNED (field
);
4135 mode
= DECL_MODE (field
);
4136 if (DECL_BIT_FIELD (field
))
4139 pos
= DECL_FIELD_BITPOS (field
);
4140 if (TREE_CODE (pos
) == INTEGER_CST
)
4142 else if (TREE_CODE (pos
) == PLUS_EXPR
4143 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
4144 constant
= TREE_OPERAND (pos
, 1), offset
= TREE_OPERAND (pos
, 0);
4149 bitpos
= TREE_INT_CST_LOW (constant
);
4155 if (contains_placeholder_p (offset
))
4156 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4157 offset
, make_tree (TREE_TYPE (exp
), target
));
4159 offset
= size_binop (FLOOR_DIV_EXPR
, offset
,
4160 size_int (BITS_PER_UNIT
));
4162 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4163 if (GET_CODE (to_rtx
) != MEM
)
4166 if (GET_MODE (offset_rtx
) != ptr_mode
)
4168 #ifdef POINTERS_EXTEND_UNSIGNED
4169 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
4171 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4176 = change_address (to_rtx
, VOIDmode
,
4177 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
4178 force_reg (ptr_mode
,
4182 if (TREE_READONLY (field
))
4184 if (GET_CODE (to_rtx
) == MEM
)
4185 to_rtx
= copy_rtx (to_rtx
);
4187 RTX_UNCHANGING_P (to_rtx
) = 1;
4190 #ifdef WORD_REGISTER_OPERATIONS
4191 /* If this initializes a field that is smaller than a word, at the
4192 start of a word, try to widen it to a full word.
4193 This special case allows us to output C++ member function
4194 initializations in a form that the optimizers can understand. */
4196 && GET_CODE (target
) == REG
4197 && bitsize
< BITS_PER_WORD
4198 && bitpos
% BITS_PER_WORD
== 0
4199 && GET_MODE_CLASS (mode
) == MODE_INT
4200 && TREE_CODE (value
) == INTEGER_CST
4201 && GET_CODE (exp_size
) == CONST_INT
4202 && bitpos
+ BITS_PER_WORD
<= INTVAL (exp_size
) * BITS_PER_UNIT
)
4204 tree type
= TREE_TYPE (value
);
4205 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4207 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4208 value
= convert (type
, value
);
4210 if (BYTES_BIG_ENDIAN
)
4212 = fold (build (LSHIFT_EXPR
, type
, value
,
4213 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4214 bitsize
= BITS_PER_WORD
;
4218 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4219 TREE_VALUE (elt
), type
,
4221 DECL_ALIGN (TREE_PURPOSE (elt
))),
4225 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4230 tree domain
= TYPE_DOMAIN (type
);
4231 HOST_WIDE_INT minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
4232 HOST_WIDE_INT maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
4233 tree elttype
= TREE_TYPE (type
);
4235 /* If the constructor has fewer elements than the array,
4236 clear the whole array first. Similarly if this is
4237 static constructor of a non-BLKmode object. */
4238 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4242 HOST_WIDE_INT count
= 0, zero_count
= 0;
4244 /* This loop is a more accurate version of the loop in
4245 mostly_zeros_p (it handles RANGE_EXPR in an index).
4246 It is also needed to check for missing elements. */
4247 for (elt
= CONSTRUCTOR_ELTS (exp
);
4249 elt
= TREE_CHAIN (elt
))
4251 tree index
= TREE_PURPOSE (elt
);
4252 HOST_WIDE_INT this_node_count
;
4253 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4255 tree lo_index
= TREE_OPERAND (index
, 0);
4256 tree hi_index
= TREE_OPERAND (index
, 1);
4257 if (TREE_CODE (lo_index
) != INTEGER_CST
4258 || TREE_CODE (hi_index
) != INTEGER_CST
)
4263 this_node_count
= TREE_INT_CST_LOW (hi_index
)
4264 - TREE_INT_CST_LOW (lo_index
) + 1;
4267 this_node_count
= 1;
4268 count
+= this_node_count
;
4269 if (mostly_zeros_p (TREE_VALUE (elt
)))
4270 zero_count
+= this_node_count
;
4272 /* Clear the entire array first if there are any missing elements,
4273 or if the incidence of zero elements is >= 75%. */
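/* Worked instance (illustrative only): for a 100-element array whose
   constructor lists all 100 elements, 80 of them zero, we get
   count == 100 and zero_count == 80, so 4 * 80 >= 3 * 100 holds and the
   whole array is cleared first; only the 20 non-zero elements are then
   stored individually.  */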
4274 if (count
< maxelt
- minelt
+ 1
4275 || 4 * zero_count
>= 3 * count
)
4281 clear_storage (target
, expr_size (exp
),
4282 (align
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
);
4286 /* Inform later passes that the old value is dead. */
4287 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4289 /* Store each element of the constructor into
4290 the corresponding element of TARGET, determined
4291 by counting the elements. */
4292 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4294 elt
= TREE_CHAIN (elt
), i
++)
4296 register enum machine_mode mode
;
4300 tree value
= TREE_VALUE (elt
);
4301 int align
= TYPE_ALIGN (TREE_TYPE (value
));
4302 tree index
= TREE_PURPOSE (elt
);
4303 rtx xtarget
= target
;
4305 if (cleared
&& is_zeros_p (value
))
4308 mode
= TYPE_MODE (elttype
);
4309 bitsize
= GET_MODE_BITSIZE (mode
);
4310 unsignedp
= TREE_UNSIGNED (elttype
);
4312 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4314 tree lo_index
= TREE_OPERAND (index
, 0);
4315 tree hi_index
= TREE_OPERAND (index
, 1);
4316 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
4317 struct nesting
*loop
;
4318 HOST_WIDE_INT lo
, hi
, count
;
4321 /* If the range is constant and "small", unroll the loop. */
4322 if (TREE_CODE (lo_index
) == INTEGER_CST
4323 && TREE_CODE (hi_index
) == INTEGER_CST
4324 && (lo
= TREE_INT_CST_LOW (lo_index
),
4325 hi
= TREE_INT_CST_LOW (hi_index
),
4326 count
= hi
- lo
+ 1,
4327 (GET_CODE (target
) != MEM
4329 || (TREE_CODE (TYPE_SIZE (elttype
)) == INTEGER_CST
4330 && TREE_INT_CST_LOW (TYPE_SIZE (elttype
)) * count
4333 lo
-= minelt
; hi
-= minelt
;
4334 for (; lo
<= hi
; lo
++)
4336 bitpos
= lo
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
));
4337 store_constructor_field (target
, bitsize
, bitpos
, mode
,
4338 value
, type
, align
, cleared
);
4343 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4344 loop_top
= gen_label_rtx ();
4345 loop_end
= gen_label_rtx ();
4347 unsignedp
= TREE_UNSIGNED (domain
);
4349 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4351 DECL_RTL (index
) = index_r
4352 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4355 if (TREE_CODE (value
) == SAVE_EXPR
4356 && SAVE_EXPR_RTL (value
) == 0)
4358 /* Make sure value gets expanded once before the
4360 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4363 store_expr (lo_index
, index_r
, 0);
4364 loop
= expand_start_loop (0);
4366 /* Assign value to element index. */
4367 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
4368 size_int (BITS_PER_UNIT
));
4369 position
= size_binop (MULT_EXPR
,
4370 size_binop (MINUS_EXPR
, index
,
4371 TYPE_MIN_VALUE (domain
)),
4373 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4374 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4375 xtarget
= change_address (target
, mode
, addr
);
4376 if (TREE_CODE (value
) == CONSTRUCTOR
)
4377 store_constructor (value
, xtarget
, align
, cleared
);
4379 store_expr (value
, xtarget
, 0);
4381 expand_exit_loop_if_false (loop
,
4382 build (LT_EXPR
, integer_type_node
,
4385 expand_increment (build (PREINCREMENT_EXPR
,
4387 index
, integer_one_node
), 0, 0);
4389 emit_label (loop_end
);
4391 /* Needed by stupid register allocation. to extend the
4392 lifetime of pseudo-regs used by target past the end
4394 emit_insn (gen_rtx_USE (GET_MODE (target
), target
));
4397 else if ((index
!= 0 && TREE_CODE (index
) != INTEGER_CST
)
4398 || TREE_CODE (TYPE_SIZE (elttype
)) != INTEGER_CST
)
4404 index
= size_int (i
);
4407 index
= size_binop (MINUS_EXPR
, index
,
4408 TYPE_MIN_VALUE (domain
));
4409 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
4410 size_int (BITS_PER_UNIT
));
4411 position
= size_binop (MULT_EXPR
, index
, position
);
4412 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4413 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4414 xtarget
= change_address (target
, mode
, addr
);
4415 store_expr (value
, xtarget
, 0);
4420 bitpos
= ((TREE_INT_CST_LOW (index
) - minelt
)
4421 * TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
4423 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
4424 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4425 type
, align
, cleared
);
4429 /* set constructor assignments */
4430 else if (TREE_CODE (type
) == SET_TYPE
)
4432 tree elt
= CONSTRUCTOR_ELTS (exp
);
4433 int nbytes
= int_size_in_bytes (type
), nbits
;
4434 tree domain
= TYPE_DOMAIN (type
);
4435 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the whole set (using bzero/memset) and then
         set the bits we want.  */
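      /* E.g. (illustrative only) a Pascal- or CHILL-style powerset
         initializer covering a single constant range, say bits 8..63 of a
         256-bit set, can be handled by clearing the whole object and then
         storing all-ones bytes over that range with memset, rather than
         or-ing the bits in one word at a time.  */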
4447 /* Check for all zeros. */
4448 if (elt
== NULL_TREE
)
4451 clear_storage (target
, expr_size (exp
),
4452 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
4456 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4457 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4458 bitlength
= size_binop (PLUS_EXPR
,
4459 size_binop (MINUS_EXPR
, domain_max
, domain_min
),
4462 if (nbytes
< 0 || TREE_CODE (bitlength
) != INTEGER_CST
)
4464 nbits
= TREE_INT_CST_LOW (bitlength
);
4466 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4467 are "complicated" (more than one range), initialize (the
4468 constant parts) by copying from a constant. */
4469 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4470 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4472 int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4473 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4474 char *bit_buffer
= (char *) alloca (nbits
);
4475 HOST_WIDE_INT word
= 0;
4478 int offset
= 0; /* In bytes from beginning of set. */
4479 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4482 if (bit_buffer
[ibit
])
4484 if (BYTES_BIG_ENDIAN
)
4485 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4487 word
|= 1 << bit_pos
;
4490 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4492 if (word
!= 0 || ! cleared
)
4494 rtx datum
= GEN_INT (word
);
4496 /* The assumption here is that it is safe to use
4497 XEXP if the set is multi-word, but not if
4498 it's single-word. */
4499 if (GET_CODE (target
) == MEM
)
4501 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
4502 to_rtx
= change_address (target
, mode
, to_rtx
);
4504 else if (offset
== 0)
4508 emit_move_insn (to_rtx
, datum
);
4514 offset
+= set_word_size
/ BITS_PER_UNIT
;
4520 /* Don't bother clearing storage if the set is all ones. */
4521 if (TREE_CHAIN (elt
) != NULL_TREE
4522 || (TREE_PURPOSE (elt
) == NULL_TREE
4524 : (TREE_CODE (TREE_VALUE (elt
)) != INTEGER_CST
4525 || TREE_CODE (TREE_PURPOSE (elt
)) != INTEGER_CST
4526 || (TREE_INT_CST_LOW (TREE_VALUE (elt
))
4527 - TREE_INT_CST_LOW (TREE_PURPOSE (elt
)) + 1
4529 clear_storage (target
, expr_size (exp
),
4530 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
4533 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
4535 /* start of range of element or NULL */
4536 tree startbit
= TREE_PURPOSE (elt
);
4537 /* end of range of element, or element value */
4538 tree endbit
= TREE_VALUE (elt
);
4539 #ifdef TARGET_MEM_FUNCTIONS
4540 HOST_WIDE_INT startb
, endb
;
4542 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
4544 bitlength_rtx
= expand_expr (bitlength
,
4545 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
4547 /* handle non-range tuple element like [ expr ] */
4548 if (startbit
== NULL_TREE
)
4550 startbit
= save_expr (endbit
);
4553 startbit
= convert (sizetype
, startbit
);
4554 endbit
= convert (sizetype
, endbit
);
4555 if (! integer_zerop (domain_min
))
4557 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
4558 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
4560 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
4561 EXPAND_CONST_ADDRESS
);
4562 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
4563 EXPAND_CONST_ADDRESS
);
4567 targetx
= assign_stack_temp (GET_MODE (target
),
4568 GET_MODE_SIZE (GET_MODE (target
)),
4570 emit_move_insn (targetx
, target
);
4572 else if (GET_CODE (target
) == MEM
)
4577 #ifdef TARGET_MEM_FUNCTIONS
4578 /* Optimization: If startbit and endbit are
4579 constants divisible by BITS_PER_UNIT,
4580 call memset instead. */
4581 if (TREE_CODE (startbit
) == INTEGER_CST
4582 && TREE_CODE (endbit
) == INTEGER_CST
4583 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
4584 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
4586 emit_library_call (memset_libfunc
, 0,
4588 plus_constant (XEXP (targetx
, 0),
4589 startb
/ BITS_PER_UNIT
),
4591 constm1_rtx
, TYPE_MODE (integer_type_node
),
4592 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
4593 TYPE_MODE (sizetype
));
4598 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
4599 0, VOIDmode
, 4, XEXP (targetx
, 0), Pmode
,
4600 bitlength_rtx
, TYPE_MODE (sizetype
),
4601 startbit_rtx
, TYPE_MODE (sizetype
),
4602 endbit_rtx
, TYPE_MODE (sizetype
));
4605 emit_move_insn (target
, targetx
);
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
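/* A hypothetical example (illustrative only): storing into a C bit-field,

     struct s { unsigned f : 3; } *p;
     p->f = 5;

   would reach this function with BITSIZE == 3, BITPOS giving the field's
   position within *P, and MODE == VOIDmode, so the value is inserted with
   the bit-field machinery below rather than an ordinary memory store.  */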
4633 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
4634 unsignedp
, align
, total_size
, alias_set
)
4636 int bitsize
, bitpos
;
4637 enum machine_mode mode
;
4639 enum machine_mode value_mode
;
4645 HOST_WIDE_INT width_mask
= 0;
4647 if (TREE_CODE (exp
) == ERROR_MARK
)
4650 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
4651 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
4653 /* If we are storing into an unaligned field of an aligned union that is
4654 in a register, we may have the mode of TARGET being an integer mode but
4655 MODE == BLKmode. In that case, get an aligned object whose size and
4656 alignment are the same as TARGET and store TARGET into it (we can avoid
4657 the store if the field being stored is the entire width of TARGET). Then
4658 call ourselves recursively to store the field into a BLKmode version of
4659 that object. Finally, load from the object into TARGET. This is not
4660 very efficient in general, but should only be slightly more expensive
4661 than the otherwise-required unaligned accesses. Perhaps this can be
4662 cleaned up later. */
4665 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
4667 rtx object
= assign_stack_temp (GET_MODE (target
),
4668 GET_MODE_SIZE (GET_MODE (target
)), 0);
4669 rtx blk_object
= copy_rtx (object
);
4671 MEM_SET_IN_STRUCT_P (object
, 1);
4672 MEM_SET_IN_STRUCT_P (blk_object
, 1);
4673 PUT_MODE (blk_object
, BLKmode
);
4675 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
4676 emit_move_insn (object
, target
);
4678 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4679 align
, total_size
, alias_set
);
4681 /* Even though we aren't returning target, we need to
4682 give it the updated value. */
4683 emit_move_insn (target
, object
);
4688 /* If the structure is in a register or if the component
4689 is a bit field, we cannot use addressing to access it.
4690 Use bit-field techniques or SUBREG to store in it. */
4692 if (mode
== VOIDmode
4693 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
4694 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
4695 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
4696 || GET_CODE (target
) == REG
4697 || GET_CODE (target
) == SUBREG
4698 /* If the field isn't aligned enough to store as an ordinary memref,
4699 store it as a bit field. */
4700 || (SLOW_UNALIGNED_ACCESS
4701 && align
* BITS_PER_UNIT
< GET_MODE_ALIGNMENT (mode
))
4702 || (SLOW_UNALIGNED_ACCESS
&& bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
4704 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4706 /* If BITSIZE is narrower than the size of the type of EXP
4707 we will be narrowing TEMP. Normally, what's wanted are the
4708 low-order bits. However, if EXP's type is a record and this is
4709 big-endian machine, we want the upper BITSIZE bits. */
4710 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
4711 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
4712 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
4713 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
4714 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
4718 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4720 if (mode
!= VOIDmode
&& mode
!= BLKmode
4721 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
4722 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
4724 /* If the modes of TARGET and TEMP are both BLKmode, both
4725 must be in memory and BITPOS must be aligned on a byte
4726 boundary. If so, we simply do a block copy. */
4727 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
4729 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
4730 || bitpos
% BITS_PER_UNIT
!= 0)
4733 target
= change_address (target
, VOIDmode
,
4734 plus_constant (XEXP (target
, 0),
4735 bitpos
/ BITS_PER_UNIT
));
4737 emit_block_move (target
, temp
,
4738 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
4742 return value_mode
== VOIDmode
? const0_rtx
: target
;
4745 /* Store the value in the bitfield. */
4746 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
4747 if (value_mode
!= VOIDmode
)
4749 /* The caller wants an rtx for the value. */
4750 /* If possible, avoid refetching from the bitfield itself. */
4752 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
4755 enum machine_mode tmode
;
4758 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
4759 tmode
= GET_MODE (temp
);
4760 if (tmode
== VOIDmode
)
4762 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
4763 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4764 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4766 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
4767 NULL_RTX
, value_mode
, 0, align
,
4774 rtx addr
= XEXP (target
, 0);
4777 /* If a value is wanted, it must be the lhs;
4778 so make the address stable for multiple use. */
4780 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
4781 && ! CONSTANT_ADDRESS_P (addr
)
4782 /* A frame-pointer reference is already stable. */
4783 && ! (GET_CODE (addr
) == PLUS
4784 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
4785 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
4786 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
4787 addr
= copy_to_reg (addr
);
4789 /* Now build a reference to just the desired component. */
4791 to_rtx
= copy_rtx (change_address (target
, mode
,
4792 plus_constant (addr
,
4794 / BITS_PER_UNIT
))));
4795 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
4796 MEM_ALIAS_SET (to_rtx
) = alias_set
;
4798 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment in bytes of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
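/* Worked example (illustrative only): for a reference like A.B[I].C the
   loop below walks the nested COMPONENT_REFs and the ARRAY_REF,
   accumulating the compile-time-constant part of the position in *PBITPOS,
   recording the I-dependent part of the offset (in units) in *POFFSET, and
   finally returning the outermost object A.  */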
4828 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
4829 punsignedp
, pvolatilep
, palignment
)
4834 enum machine_mode
*pmode
;
4839 tree orig_exp
= exp
;
4841 enum machine_mode mode
= VOIDmode
;
4842 tree offset
= integer_zero_node
;
4843 unsigned int alignment
= BIGGEST_ALIGNMENT
;
4845 if (TREE_CODE (exp
) == COMPONENT_REF
)
4847 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
4848 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
4849 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
4850 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
4852 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
4854 size_tree
= TREE_OPERAND (exp
, 1);
4855 *punsignedp
= TREE_UNSIGNED (exp
);
4859 mode
= TYPE_MODE (TREE_TYPE (exp
));
4860 if (mode
== BLKmode
)
4861 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
4863 *pbitsize
= GET_MODE_BITSIZE (mode
);
4864 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4869 if (TREE_CODE (size_tree
) != INTEGER_CST
)
4870 mode
= BLKmode
, *pbitsize
= -1;
4872 *pbitsize
= TREE_INT_CST_LOW (size_tree
);
4875 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4876 and find the ultimate containing object. */
4882 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
4884 tree pos
= (TREE_CODE (exp
) == COMPONENT_REF
4885 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 1))
4886 : TREE_OPERAND (exp
, 2));
4887 tree constant
= integer_zero_node
, var
= pos
;
4889 /* If this field hasn't been filled in yet, don't go
4890 past it. This should only happen when folding expressions
4891 made during type construction. */
4895 /* Assume here that the offset is a multiple of a unit.
4896 If not, there should be an explicitly added constant. */
4897 if (TREE_CODE (pos
) == PLUS_EXPR
4898 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
4899 constant
= TREE_OPERAND (pos
, 1), var
= TREE_OPERAND (pos
, 0);
4900 else if (TREE_CODE (pos
) == INTEGER_CST
)
4901 constant
= pos
, var
= integer_zero_node
;
4903 *pbitpos
+= TREE_INT_CST_LOW (constant
);
4904 offset
= size_binop (PLUS_EXPR
, offset
,
4905 size_binop (EXACT_DIV_EXPR
, var
,
4906 size_int (BITS_PER_UNIT
)));
4909 else if (TREE_CODE (exp
) == ARRAY_REF
)
4911 /* This code is based on the code in case ARRAY_REF in expand_expr
4912 below. We assume here that the size of an array element is
4913 always an integral multiple of BITS_PER_UNIT. */
4915 tree index
= TREE_OPERAND (exp
, 1);
4916 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4918 = domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
4919 tree index_type
= TREE_TYPE (index
);
4922 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
4924 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
4926 index_type
= TREE_TYPE (index
);
4929 /* Optimize the special-case of a zero lower bound.
4931 We convert the low_bound to sizetype to avoid some problems
4932 with constant folding. (E.g. suppose the lower bound is 1,
4933 and its mode is QI. Without the conversion, (ARRAY
4934 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4935 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4937 But sizetype isn't quite right either (especially if
4938 the lowbound is negative). FIXME */
4940 if (! integer_zerop (low_bound
))
4941 index
= fold (build (MINUS_EXPR
, index_type
, index
,
4942 convert (sizetype
, low_bound
)));
4944 if (TREE_CODE (index
) == INTEGER_CST
)
4946 index
= convert (sbitsizetype
, index
);
4947 index_type
= TREE_TYPE (index
);
4950 xindex
= fold (build (MULT_EXPR
, sbitsizetype
, index
,
4951 convert (sbitsizetype
,
4952 TYPE_SIZE (TREE_TYPE (exp
)))));
4954 if (TREE_CODE (xindex
) == INTEGER_CST
4955 && TREE_INT_CST_HIGH (xindex
) == 0)
4956 *pbitpos
+= TREE_INT_CST_LOW (xindex
);
4959 /* Either the bit offset calculated above is not constant, or
4960 it overflowed. In either case, redo the multiplication
4961 against the size in units. This is especially important
4962 in the non-constant case to avoid a division at runtime. */
4963 xindex
= fold (build (MULT_EXPR
, ssizetype
, index
,
4965 TYPE_SIZE_UNIT (TREE_TYPE (exp
)))));
4967 if (contains_placeholder_p (xindex
))
4968 xindex
= build (WITH_RECORD_EXPR
, sizetype
, xindex
, exp
);
4970 offset
= size_binop (PLUS_EXPR
, offset
, xindex
);
4973 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
4974 && ! ((TREE_CODE (exp
) == NOP_EXPR
4975 || TREE_CODE (exp
) == CONVERT_EXPR
)
4976 && ! (TREE_CODE (TREE_TYPE (exp
)) == UNION_TYPE
4977 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4979 && (TYPE_MODE (TREE_TYPE (exp
))
4980 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
4983 /* If any reference in the chain is volatile, the effect is volatile. */
4984 if (TREE_THIS_VOLATILE (exp
))
4987 /* If the offset is non-constant already, then we can't assume any
4988 alignment more than the alignment here. */
4989 if (! integer_zerop (offset
))
4990 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
4992 exp
= TREE_OPERAND (exp
, 0);
4995 if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
4996 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
4997 else if (TREE_TYPE (exp
) != 0)
4998 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5000 if (integer_zerop (offset
))
5003 if (offset
!= 0 && contains_placeholder_p (offset
))
5004 offset
= build (WITH_RECORD_EXPR
, sizetype
, offset
, orig_exp
);
5008 *palignment
= alignment
/ BITS_PER_UNIT
;
/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
      return MEMORY_USE_RO;

    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;

    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;

    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
         MEMORY_USE_DONT, because they are modifiers to a call of
         expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;

    case EXPAND_MEMORY_USE_BAD:
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
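/* E.g. (illustrative only) given (plus:SI (reg:SI 100) (const_int 4)),
   force_operand emits an add insn via expand_binop and returns a pseudo
   register holding the sum; a value that is already a REG, MEM or constant
   comes back unchanged.  */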
force_operand (value, target)
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
      subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
                          force_operand (op2, NULL_RTX),

      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
          binoptab = add_optab;
          op2 = negate_rtx (GET_MODE (value), op2);

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
          rtx temp = expand_binop (GET_MODE (value), binoptab,
                                   XEXP (XEXP (value, 0), 0), op2,
                                   subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_binop (GET_MODE (value), binoptab, temp,
                               force_operand (XEXP (XEXP (value, 0), 1), 0),
                               target, 0, OPTAB_LIB_WIDEN);

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
                           force_operand (op2, NULL_RTX),
                           target, 0, OPTAB_LIB_WIDEN);
  /* We give UNSIGNEDP = 0 to expand_binop
     because the only operations we are expanding here are signed ones.  */
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */
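/* Illustrative sketch (not from this file): if LIST names a part P of LHS
   whose storage is not to be copied, the loop below builds the reference
   LHS.P, copies its current contents into a stack temporary wrapped in an
   RTL_EXPR, and chains the pair onto the returned list so that the old
   value can be put back if the assignment would otherwise clobber it.  */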
save_noncopied_parts (lhs, list)
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
        rtx target = assign_temp (part_type, 0, 1, 1);
        if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
          target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
        parts = tree_cons (to_be_saved,
                           build (RTL_EXPR, part_type, NULL_TREE,
        store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */
init_noncopied_parts (lhs, list)
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else if (TREE_PURPOSE (tail))
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
        parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
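/* E.g. (illustrative only) when expanding X = FOO (Y), the caller may ask
   whether the rtx for X is safe from the CALL_EXPR on the right-hand side:
   if X is a hard register or memory, the call could clobber it, so zero is
   returned and a temporary is used instead; if X is a pseudo register not
   referenced anywhere in EXP, nonzero is returned and X can be used as the
   target directly.  */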
5200 safe_from_p (x
, exp
, top_p
)
5207 static int save_expr_count
;
5208 static int save_expr_size
= 0;
5209 static tree
*save_expr_rewritten
;
5210 static tree save_expr_trees
[256];
5213 /* If EXP has varying size, we MUST use a target since we currently
5214 have no way of allocating temporaries of variable size
5215 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5216 So we assume here that something at a higher level has prevented a
5217 clash. This is somewhat bogus, but the best we can do. Only
5218 do this when X is BLKmode and when we are at the top level. */
5219 || (top_p
&& TREE_TYPE (exp
) != 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
5220 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5221 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5222 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5223 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5225 && GET_MODE (x
) == BLKmode
))
5228 if (top_p
&& save_expr_size
== 0)
5232 save_expr_count
= 0;
5233 save_expr_size
= sizeof (save_expr_trees
) / sizeof (save_expr_trees
[0]);
5234 save_expr_rewritten
= &save_expr_trees
[0];
5236 rtn
= safe_from_p (x
, exp
, 1);
5238 for (i
= 0; i
< save_expr_count
; ++i
)
5240 if (TREE_CODE (save_expr_trees
[i
]) != ERROR_MARK
)
5242 TREE_SET_CODE (save_expr_trees
[i
], SAVE_EXPR
);
5250 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5251 find the underlying pseudo. */
5252 if (GET_CODE (x
) == SUBREG
)
5255 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5259 /* If X is a location in the outgoing argument area, it is always safe. */
5260 if (GET_CODE (x
) == MEM
5261 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5262 || (GET_CODE (XEXP (x
, 0)) == PLUS
5263 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
5266 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5269 exp_rtl
= DECL_RTL (exp
);
5276 if (TREE_CODE (exp
) == TREE_LIST
)
5277 return ((TREE_VALUE (exp
) == 0
5278 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5279 && (TREE_CHAIN (exp
) == 0
5280 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5281 else if (TREE_CODE (exp
) == ERROR_MARK
)
5282 return 1; /* An already-visited SAVE_EXPR? */
5287 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5291 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5292 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5296 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5297 the expression. If it is set, we conflict iff we are that rtx or
5298 both are in memory. Otherwise, we check all operands of the
5299 expression recursively. */
5301 switch (TREE_CODE (exp
))
5304 return (staticp (TREE_OPERAND (exp
, 0))
5305 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5306 || TREE_STATIC (exp
));
5309 if (GET_CODE (x
) == MEM
)
5314 exp_rtl
= CALL_EXPR_RTL (exp
);
5317 /* Assume that the call will clobber all hard registers and
5319 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5320 || GET_CODE (x
) == MEM
)
5327 /* If a sequence exists, we would have to scan every instruction
5328 in the sequence to see if it was safe. This is probably not
5330 if (RTL_EXPR_SEQUENCE (exp
))
5333 exp_rtl
= RTL_EXPR_RTL (exp
);
5336 case WITH_CLEANUP_EXPR
:
5337 exp_rtl
= RTL_EXPR_RTL (exp
);
5340 case CLEANUP_POINT_EXPR
:
5341 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5344 exp_rtl
= SAVE_EXPR_RTL (exp
);
5348 /* This SAVE_EXPR might appear many times in the top-level
5349 safe_from_p() expression, and if it has a complex
5350 subexpression, examining it multiple times could result
5351 in a combinatorial explosion. E.g. on an Alpha
5352 running at least 200MHz, a Fortran test case compiled with
5353 optimization took about 28 minutes to compile -- even though
5354 it was only a few lines long, and the complicated line causing
5355 so much time to be spent in the earlier version of safe_from_p()
5356 had only 293 or so unique nodes.
5358 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5359 where it is so we can turn it back in the top-level safe_from_p()
5362 /* For now, don't bother re-sizing the array. */
5363 if (save_expr_count
>= save_expr_size
)
5365 save_expr_rewritten
[save_expr_count
++] = exp
;
5367 nops
= tree_code_length
[(int) SAVE_EXPR
];
5368 for (i
= 0; i
< nops
; i
++)
5370 tree operand
= TREE_OPERAND (exp
, i
);
5371 if (operand
== NULL_TREE
)
5373 TREE_SET_CODE (exp
, ERROR_MARK
);
5374 if (!safe_from_p (x
, operand
, 0))
5376 TREE_SET_CODE (exp
, SAVE_EXPR
);
5378 TREE_SET_CODE (exp
, ERROR_MARK
);
5382 /* The only operand we look at is operand 1. The rest aren't
5383 part of the expression. */
5384 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5386 case METHOD_CALL_EXPR
:
5387 /* This takes a rtx argument, but shouldn't appear here. */
5394 /* If we have an rtx, we do not need to scan our operands. */
5398 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
5399 for (i
= 0; i
< nops
; i
++)
5400 if (TREE_OPERAND (exp
, i
) != 0
5401 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5405 /* If we have an rtl, find any enclosed object. Then see if we conflict
5409 if (GET_CODE (exp_rtl
) == SUBREG
)
5411 exp_rtl
= SUBREG_REG (exp_rtl
);
5412 if (GET_CODE (exp_rtl
) == REG
5413 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5417 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5418 are memory and EXP is not readonly. */
5419 return ! (rtx_equal_p (x
, exp_rtl
)
5420 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5421 && ! TREE_READONLY (exp
)));
5424 /* If we reach here, it is safe. */
/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

  switch (TREE_CODE (exp))
      return DECL_RTL (exp);
#ifdef MAX_INTEGER_COMPUTATION_MODE
check_max_integer_computation_mode (exp)
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        fatal ("unsupported wide integer operation");

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        fatal ("unsupported wide integer operation");

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        fatal ("unsupported wide integer operation");
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
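/* Illustrative example (not from this file): expanding the address
   computation for a reference like A[I], with A a global array of ints,
   under MODIFIER == EXPAND_SUM may return a nest such as

     (plus (mult (reg) (const_int 4)) (symbol_ref "a"))

   leaving it to the caller to turn that into a legitimate address, instead
   of emitting the multiply and add insns immediately.  */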
5553 expand_expr (exp
, target
, tmode
, modifier
)
5556 enum machine_mode tmode
;
5557 enum expand_modifier modifier
;
5559 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5560 This is static so it will be accessible to our recursive callees. */
5561 static tree placeholder_list
= 0;
5562 register rtx op0
, op1
, temp
;
5563 tree type
= TREE_TYPE (exp
);
5564 int unsignedp
= TREE_UNSIGNED (type
);
5565 register enum machine_mode mode
;
5566 register enum tree_code code
= TREE_CODE (exp
);
5568 rtx subtarget
, original_target
;
5571 /* Used by check-memory-usage to make modifier read only. */
5572 enum expand_modifier ro_modifier
;
5574 /* Handle ERROR_MARK before anybody tries to access its type. */
5575 if (TREE_CODE (exp
) == ERROR_MARK
)
5577 op0
= CONST0_RTX (tmode
);
5583 mode
= TYPE_MODE (type
);
5584 /* Use subtarget as the target for operand 0 of a binary operation. */
5585 subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
5586 original_target
= target
;
5587 ignore
= (target
== const0_rtx
5588 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
5589 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
5590 || code
== COND_EXPR
)
5591 && TREE_CODE (type
) == VOID_TYPE
));
5593 /* Make a read-only version of the modifier. */
5594 if (modifier
== EXPAND_NORMAL
|| modifier
== EXPAND_SUM
5595 || modifier
== EXPAND_CONST_ADDRESS
|| modifier
== EXPAND_INITIALIZER
)
5596 ro_modifier
= modifier
;
5598 ro_modifier
= EXPAND_NORMAL
;
5600 /* Don't use hard regs as subtargets, because the combiner
5601 can only handle pseudo regs. */
5602 if (subtarget
&& REGNO (subtarget
) < FIRST_PSEUDO_REGISTER
)
5604 /* Avoid subtargets inside loops,
5605 since they hide some invariant expressions. */
5606 if (preserve_subexpressions_p ())
5609 /* If we are going to ignore this result, we need only do something
5610 if there is a side-effect somewhere in the expression. If there
5611 is, short-circuit the most common cases here. Note that we must
5612 not call expand_expr with anything but const0_rtx in case this
5613 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5617 if (! TREE_SIDE_EFFECTS (exp
))
5620 /* Ensure we reference a volatile object even if value is ignored. */
5621 if (TREE_THIS_VOLATILE (exp
)
5622 && TREE_CODE (exp
) != FUNCTION_DECL
5623 && mode
!= VOIDmode
&& mode
!= BLKmode
)
5625 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, ro_modifier
);
5626 if (GET_CODE (temp
) == MEM
)
5627 temp
= copy_to_reg (temp
);
5631 if (TREE_CODE_CLASS (code
) == '1')
5632 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
5633 VOIDmode
, ro_modifier
);
5634 else if (TREE_CODE_CLASS (code
) == '2'
5635 || TREE_CODE_CLASS (code
) == '<')
5637 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
5638 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
5641 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
5642 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
5643 /* If the second operand has no side effects, just evaluate
5645 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
5646 VOIDmode
, ro_modifier
);
5651 #ifdef MAX_INTEGER_COMPUTATION_MODE
5652 /* Only check stuff here if the mode we want is different from the mode
5653 of the expression; if it's the same, check_max_integer_computiation_mode
5654 will handle it. Do we really need to check this stuff at all? */
5657 && GET_MODE (target
) != mode
5658 && TREE_CODE (exp
) != INTEGER_CST
5659 && TREE_CODE (exp
) != PARM_DECL
5660 && TREE_CODE (exp
) != ARRAY_REF
5661 && TREE_CODE (exp
) != COMPONENT_REF
5662 && TREE_CODE (exp
) != BIT_FIELD_REF
5663 && TREE_CODE (exp
) != INDIRECT_REF
5664 && TREE_CODE (exp
) != CALL_EXPR
5665 && TREE_CODE (exp
) != VAR_DECL
5666 && TREE_CODE (exp
) != RTL_EXPR
)
5668 enum machine_mode mode
= GET_MODE (target
);
5670 if (GET_MODE_CLASS (mode
) == MODE_INT
5671 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5672 fatal ("unsupported wide integer operation");
5676 && TREE_CODE (exp
) != INTEGER_CST
5677 && TREE_CODE (exp
) != PARM_DECL
5678 && TREE_CODE (exp
) != ARRAY_REF
5679 && TREE_CODE (exp
) != COMPONENT_REF
5680 && TREE_CODE (exp
) != BIT_FIELD_REF
5681 && TREE_CODE (exp
) != INDIRECT_REF
5682 && TREE_CODE (exp
) != VAR_DECL
5683 && TREE_CODE (exp
) != CALL_EXPR
5684 && TREE_CODE (exp
) != RTL_EXPR
5685 && GET_MODE_CLASS (tmode
) == MODE_INT
5686 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
5687 fatal ("unsupported wide integer operation");
5689 check_max_integer_computation_mode (exp
);
5692 /* If will do cse, generate all results into pseudo registers
5693 since 1) that allows cse to find more things
5694 and 2) otherwise cse could produce an insn the machine
5697 if (! cse_not_expected
&& mode
!= BLKmode
&& target
5698 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
5705 tree function
= decl_function_context (exp
);
5706 /* Handle using a label in a containing function. */
5707 if (function
!= current_function_decl
5708 && function
!= inline_function_decl
&& function
!= 0)
5710 struct function
*p
= find_function_data (function
);
5711 /* Allocate in the memory associated with the function
5712 that the label is in. */
5713 push_obstacks (p
->function_obstack
,
5714 p
->function_maybepermanent_obstack
);
5716 p
->expr
->x_forced_labels
5717 = gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (exp
),
5718 p
->expr
->x_forced_labels
);
5723 if (modifier
== EXPAND_INITIALIZER
)
5724 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
,
5729 temp
= gen_rtx_MEM (FUNCTION_MODE
,
5730 gen_rtx_LABEL_REF (Pmode
, label_rtx (exp
)));
5731 if (function
!= current_function_decl
5732 && function
!= inline_function_decl
&& function
!= 0)
5733 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
5738 if (DECL_RTL (exp
) == 0)
5740 error_with_decl (exp
, "prior parameter's size depends on `%s'");
5741 return CONST0_RTX (mode
);
5744 /* ... fall through ... */
5747 /* If a static var's type was incomplete when the decl was written,
5748 but the type is complete now, lay out the decl now. */
5749 if (DECL_SIZE (exp
) == 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
5750 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
5752 push_obstacks_nochange ();
5753 end_temporary_allocation ();
5754 layout_decl (exp
, 0);
5755 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
5759 /* Although static-storage variables start off initialized, according to
5760 ANSI C, a memcpy could overwrite them with uninitialized values. So
5761 we check them too. This also lets us check for read-only variables
5762 accessed via a non-const declaration, in case it won't be detected
5763 any other way (e.g., in an embedded system or OS kernel without
5766 Aggregates are not checked here; they're handled elsewhere. */
5767 if (current_function
&& current_function_check_memory_usage
5769 && GET_CODE (DECL_RTL (exp
)) == MEM
5770 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
5772 enum memory_use_mode memory_usage
;
5773 memory_usage
= get_memory_usage_from_modifier (modifier
);
5775 if (memory_usage
!= MEMORY_USE_DONT
)
5776 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
5777 XEXP (DECL_RTL (exp
), 0), Pmode
,
5778 GEN_INT (int_size_in_bytes (type
)),
5779 TYPE_MODE (sizetype
),
5780 GEN_INT (memory_usage
),
5781 TYPE_MODE (integer_type_node
));
5784 /* ... fall through ... */
5788 if (DECL_RTL (exp
) == 0)
5791 /* Ensure variable marked as used even if it doesn't go through
5792 a parser. If it hasn't be used yet, write out an external
5794 if (! TREE_USED (exp
))
5796 assemble_external (exp
);
5797 TREE_USED (exp
) = 1;
5800 /* Show we haven't gotten RTL for this yet. */
5803 /* Handle variables inherited from containing functions. */
5804 context
= decl_function_context (exp
);
5806 /* We treat inline_function_decl as an alias for the current function
5807 because that is the inline function whose vars, types, etc.
5808 are being merged into the current function.
5809 See expand_inline_function. */
5811 if (context
!= 0 && context
!= current_function_decl
5812 && context
!= inline_function_decl
5813 /* If var is static, we don't need a static chain to access it. */
5814 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
5815 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
5819 /* Mark as non-local and addressable. */
5820 DECL_NONLOCAL (exp
) = 1;
5821 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
5823 mark_addressable (exp
);
5824 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
5826 addr
= XEXP (DECL_RTL (exp
), 0);
5827 if (GET_CODE (addr
) == MEM
)
5828 addr
= gen_rtx_MEM (Pmode
,
5829 fix_lexical_addr (XEXP (addr
, 0), exp
));
5831 addr
= fix_lexical_addr (addr
, exp
);
5832 temp
= change_address (DECL_RTL (exp
), mode
, addr
);
5835 /* This is the case of an array whose size is to be determined
5836 from its initializer, while the initializer is still being parsed.
5839 else if (GET_CODE (DECL_RTL (exp
)) == MEM
5840 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
5841 temp
= change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
5842 XEXP (DECL_RTL (exp
), 0));
5844 /* If DECL_RTL is memory, we are in the normal case and either
5845 the address is not valid or it is not a register and -fforce-addr
5846 is specified, get the address into a register. */
5848 else if (GET_CODE (DECL_RTL (exp
)) == MEM
5849 && modifier
!= EXPAND_CONST_ADDRESS
5850 && modifier
!= EXPAND_SUM
5851 && modifier
!= EXPAND_INITIALIZER
5852 && (! memory_address_p (DECL_MODE (exp
),
5853 XEXP (DECL_RTL (exp
), 0))
5855 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
5856 temp
= change_address (DECL_RTL (exp
), VOIDmode
,
5857 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
5859 /* If we got something, return it. But first, set the alignment
5860 the address is a register. */
5863 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
5864 mark_reg_pointer (XEXP (temp
, 0),
5865 DECL_ALIGN (exp
) / BITS_PER_UNIT
);
5870 /* If the mode of DECL_RTL does not match that of the decl, it
5871 must be a promoted value. We return a SUBREG of the wanted mode,
5872 but mark it so that we know that it was already extended. */
5874 if (GET_CODE (DECL_RTL (exp
)) == REG
5875 && GET_MODE (DECL_RTL (exp
)) != mode
)
5877 /* Get the signedness used for this variable. Ensure we get the
5878 same mode we got when the variable was declared. */
5879 if (GET_MODE (DECL_RTL (exp
))
5880 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
5883 temp
= gen_rtx_SUBREG (mode
, DECL_RTL (exp
), 0);
5884 SUBREG_PROMOTED_VAR_P (temp
) = 1;
5885 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
5889 return DECL_RTL (exp
);
5892 return immed_double_const (TREE_INT_CST_LOW (exp
),
5893 TREE_INT_CST_HIGH (exp
),
5897 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
,
5898 EXPAND_MEMORY_USE_BAD
);
5901 /* If optimized, generate immediate CONST_DOUBLE
5902 which will be turned into memory by reload if necessary.
5904 We used to force a register so that loop.c could see it. But
5905 this does not allow gen_* patterns to perform optimizations with
5906 the constants. It also produces two insns in cases like "x = 1.0;".
5907 On most machines, floating-point constants are not permitted in
5908 many insns, so we'd end up copying it to a register in any case.
5910 Now, we do the copying in expand_binop, if appropriate. */
5911 return immed_real_const (exp
);
5915 if (! TREE_CST_RTL (exp
))
5916 output_constant_def (exp
);
5918 /* TREE_CST_RTL probably contains a constant address.
5919 On RISC machines where a constant address isn't valid,
5920 make some insns to get that address into a register. */
5921 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
5922 && modifier
!= EXPAND_CONST_ADDRESS
5923 && modifier
!= EXPAND_INITIALIZER
5924 && modifier
!= EXPAND_SUM
5925 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
5927 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
5928 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
5929 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
5930 return TREE_CST_RTL (exp
);
5932 case EXPR_WITH_FILE_LOCATION
:
5935 char *saved_input_filename
= input_filename
;
5936 int saved_lineno
= lineno
;
5937 input_filename
= EXPR_WFL_FILENAME (exp
);
5938 lineno
= EXPR_WFL_LINENO (exp
);
5939 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
5940 emit_line_note (input_filename
, lineno
);
5941 /* Possibly avoid switching back and force here */
5942 to_return
= expand_expr (EXPR_WFL_NODE (exp
), target
, tmode
, modifier
);
5943 input_filename
= saved_input_filename
;
5944 lineno
= saved_lineno
;
5949 context
= decl_function_context (exp
);
5951 /* If this SAVE_EXPR was at global context, assume we are an
5952 initialization function and move it into our context. */
5954 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
5956 /* We treat inline_function_decl as an alias for the current function
5957 because that is the inline function whose vars, types, etc.
5958 are being merged into the current function.
5959 See expand_inline_function. */
5960 if (context
== current_function_decl
|| context
== inline_function_decl
)
5963 /* If this is non-local, handle it. */
5966 /* The following call just exists to abort if the context is
5967 not of a containing function. */
5968 find_function_data (context
);
5970 temp
= SAVE_EXPR_RTL (exp
);
5971 if (temp
&& GET_CODE (temp
) == REG
)
5973 put_var_into_stack (exp
);
5974 temp
= SAVE_EXPR_RTL (exp
);
5976 if (temp
== 0 || GET_CODE (temp
) != MEM
)
5978 return change_address (temp
, mode
,
5979 fix_lexical_addr (XEXP (temp
, 0), exp
));
5981 if (SAVE_EXPR_RTL (exp
) == 0)
5983 if (mode
== VOIDmode
)
5986 temp
= assign_temp (type
, 3, 0, 0);
5988 SAVE_EXPR_RTL (exp
) = temp
;
5989 if (!optimize
&& GET_CODE (temp
) == REG
)
5990 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
5993 /* If the mode of TEMP does not match that of the expression, it
5994 must be a promoted value. We pass store_expr a SUBREG of the
5995 wanted mode but mark it so that we know that it was already
5996 extended. Note that `unsignedp' was modified above in
5999 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6001 temp
= gen_rtx_SUBREG (mode
, SAVE_EXPR_RTL (exp
), 0);
6002 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6003 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6006 if (temp
== const0_rtx
)
6007 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6008 EXPAND_MEMORY_USE_BAD
);
6010 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
6012 TREE_USED (exp
) = 1;
      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
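      /* Note on the promoted-value handling above (an explanatory sketch):
	 on a target that, say, promotes HImode quantities to full SImode
	 registers, SAVE_EXPR_RTL holds an SImode pseudo while the
	 expression's own mode is HImode.  The code therefore hands back a
	 SUBREG of the saved register with SUBREG_PROMOTED_VAR_P (and the
	 signedness flag) set, telling later consumers that the value is
	 already sign- or zero-extended.  */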
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
      return temp;
    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if some object in it of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
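      /* Rough picture of how the two cases above cooperate (illustrative
	 summary only): a WITH_RECORD_EXPR pushes its record object onto
	 placeholder_list before expanding its operand; any PLACEHOLDER_EXPR
	 reached inside that operand then scans the list for an object (or a
	 pointer to one) whose type matches, and expands that object in its
	 place.  This gives self-referential sizes and bounds -- e.g. a field
	 whose size depends on another field of the same record -- a concrete
	 object to refer to.  */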
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;
    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();
      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
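      /* Note: an RTL_EXPR's insn sequence may be emitted exactly once.  The
	 sequence field is overwritten with const0_rtx after emission, and
	 the abort above catches any attempt to expand the same RTL_EXPR a
	 second time.  */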
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (! MOVE_BY_PIECES_P
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = copy_rtx (target);

	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
	  return target;
	}
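      /* Note on the branch above: when TARGET is a MEM it is copied with
	 copy_rtx before RTX_UNCHANGING_P is set, so the flag is attached to
	 a fresh rtx rather than to one that might be shared elsewhere.  */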
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
	tree index;
	tree string = string_constant (exp1, &index);
	int i;

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return GEN_INT (TREE_STRING_POINTER (string)[i]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (current_function && current_function_check_memory_usage
	    && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   op0, Pmode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (exp1) == PLUS_EXPR
	    || (TREE_CODE (exp1) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_SET_IN_STRUCT_P (temp, 1);

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
	MEM_ALIAS_SET (temp) = get_alias_set (exp);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
	return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = TREE_OPERAND (exp, 1);
	tree index_type = TREE_TYPE (index);
	HOST_WIDE_INT i;

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	   But sizetype isn't quite right either (especially if
	   the lowbound is negative).  FIXME */

	if (! integer_zerop (low_bound))
	  index = fold (build (MINUS_EXPR, index_type, index,
			       convert (sizetype, low_bound)));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (array)[i]);
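	/* For instance, the fold above turns "foo"[2] into the integer
	   constant 'o'; no memory reference is generated at all for such a
	   constant subscript of a narrow string literal.  */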
6368 /* If this is a constant index into a constant array,
6369 just get the value from the array. Handle both the cases when
6370 we have an explicit constructor and when our operand is a variable
6371 that was declared const. */
6373 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
))
6375 if (TREE_CODE (index
) == INTEGER_CST
6376 && TREE_INT_CST_HIGH (index
) == 0)
6378 tree elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
6380 i
= TREE_INT_CST_LOW (index
);
6382 elem
= TREE_CHAIN (elem
);
6384 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6385 tmode
, ro_modifier
);
6389 else if (optimize
>= 1
6390 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6391 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6392 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
6394 if (TREE_CODE (index
) == INTEGER_CST
)
6396 tree init
= DECL_INITIAL (array
);
6398 i
= TREE_INT_CST_LOW (index
);
6399 if (TREE_CODE (init
) == CONSTRUCTOR
)
6401 tree elem
= CONSTRUCTOR_ELTS (init
);
6404 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
))
6405 elem
= TREE_CHAIN (elem
);
6407 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6408 tmode
, ro_modifier
);
6410 else if (TREE_CODE (init
) == STRING_CST
6411 && TREE_INT_CST_HIGH (index
) == 0
6412 && (TREE_INT_CST_LOW (index
)
6413 < TREE_STRING_LENGTH (init
)))
6415 (TREE_STRING_POINTER
6416 (init
)[TREE_INT_CST_LOW (index
)]));
6421 /* ... fall through ... */
6425 /* If the operand is a CONSTRUCTOR, we can just extract the
6426 appropriate field if it is present. Don't do this if we have
6427 already written the data since we want to refer to that copy
6428 and varasm.c assumes that's what we'll do. */
6429 if (code
!= ARRAY_REF
6430 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
6431 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
6435 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6436 elt
= TREE_CHAIN (elt
))
6437 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6438 /* We can normally use the value of the field in the
6439 CONSTRUCTOR. However, if this is a bitfield in
6440 an integral mode that we can fit in a HOST_WIDE_INT,
6441 we must mask only the number of bits in the bitfield,
6442 since this is done implicitly by the constructor. If
6443 the bitfield does not meet either of those conditions,
6444 we can't do this optimization. */
6445 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6446 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6448 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6449 <= HOST_BITS_PER_WIDE_INT
))))
6451 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6452 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6454 int bitsize
= DECL_FIELD_SIZE (TREE_PURPOSE (elt
));
6456 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6458 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6459 op0
= expand_and (op0
, op1
, target
);
6463 enum machine_mode imode
6464 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6466 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6469 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6471 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6481 enum machine_mode mode1
;
6487 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6488 &mode1
, &unsignedp
, &volatilep
,
6491 /* If we got back the original object, something is wrong. Perhaps
6492 we are evaluating an expression too early. In any event, don't
6493 infinitely recurse. */
6497 /* If TEM's type is a union of variable size, pass TARGET to the inner
6498 computation, since it will need a temporary and TARGET is known
6499 to have to do. This occurs in unchecked conversion in Ada. */
6501 op0
= expand_expr (tem
,
6502 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
6503 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
6505 ? target
: NULL_RTX
),
6507 modifier
== EXPAND_INITIALIZER
6508 ? modifier
: EXPAND_NORMAL
);
6510 /* If this is a constant, put it into a register if it is a
6511 legitimate constant and memory if it isn't. */
6512 if (CONSTANT_P (op0
))
6514 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
6515 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
))
6516 op0
= force_reg (mode
, op0
);
6518 op0
= validize_mem (force_const_mem (mode
, op0
));
6523 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
6525 if (GET_CODE (op0
) != MEM
)
6528 if (GET_MODE (offset_rtx
) != ptr_mode
)
6530 #ifdef POINTERS_EXTEND_UNSIGNED
6531 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
6533 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
6537 /* A constant address in TO_RTX can have VOIDmode, we must not try
6538 to call force_reg for that case. Avoid that case. */
6539 if (GET_CODE (op0
) == MEM
6540 && GET_MODE (op0
) == BLKmode
6541 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
6543 && (bitpos
% bitsize
) == 0
6544 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
6545 && (alignment
* BITS_PER_UNIT
) == GET_MODE_ALIGNMENT (mode1
))
6547 rtx temp
= change_address (op0
, mode1
,
6548 plus_constant (XEXP (op0
, 0),
6551 if (GET_CODE (XEXP (temp
, 0)) == REG
)
6554 op0
= change_address (op0
, mode1
,
6555 force_reg (GET_MODE (XEXP (temp
, 0)),
6561 op0
= change_address (op0
, VOIDmode
,
6562 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
6563 force_reg (ptr_mode
,
6567 /* Don't forget about volatility even if this is a bitfield. */
6568 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
6570 op0
= copy_rtx (op0
);
6571 MEM_VOLATILE_P (op0
) = 1;
6574 /* Check the access. */
6575 if (current_function_check_memory_usage
&& GET_CODE (op0
) == MEM
)
6577 enum memory_use_mode memory_usage
;
6578 memory_usage
= get_memory_usage_from_modifier (modifier
);
6580 if (memory_usage
!= MEMORY_USE_DONT
)
6585 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
6586 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
6588 /* Check the access right of the pointer. */
6589 if (size
> BITS_PER_UNIT
)
6590 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
6592 GEN_INT (size
/ BITS_PER_UNIT
),
6593 TYPE_MODE (sizetype
),
6594 GEN_INT (memory_usage
),
6595 TYPE_MODE (integer_type_node
));
6599 /* In cases where an aligned union has an unaligned object
6600 as a field, we might be extracting a BLKmode value from
6601 an integer-mode (e.g., SImode) object. Handle this case
6602 by doing the extract into an object as wide as the field
6603 (which we know to be the width of a basic mode), then
6604 storing into memory, and changing the mode to BLKmode.
6605 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6606 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6607 if (mode1
== VOIDmode
6608 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6609 || (modifier
!= EXPAND_CONST_ADDRESS
6610 && modifier
!= EXPAND_INITIALIZER
6611 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
6612 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6613 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6614 /* If the field isn't aligned enough to fetch as a memref,
6615 fetch it as a bit field. */
6616 || (SLOW_UNALIGNED_ACCESS
6617 && ((TYPE_ALIGN (TREE_TYPE (tem
)) < (unsigned int) GET_MODE_ALIGNMENT (mode
))
6618 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))))))
6620 enum machine_mode ext_mode
= mode
;
6622 if (ext_mode
== BLKmode
)
6623 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
6625 if (ext_mode
== BLKmode
)
6627 /* In this case, BITPOS must start at a byte boundary and
6628 TARGET, if specified, must be a MEM. */
6629 if (GET_CODE (op0
) != MEM
6630 || (target
!= 0 && GET_CODE (target
) != MEM
)
6631 || bitpos
% BITS_PER_UNIT
!= 0)
6634 op0
= change_address (op0
, VOIDmode
,
6635 plus_constant (XEXP (op0
, 0),
6636 bitpos
/ BITS_PER_UNIT
));
6638 target
= assign_temp (type
, 0, 1, 1);
6640 emit_block_move (target
, op0
,
6641 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6648 op0
= validize_mem (op0
);
6650 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
6651 mark_reg_pointer (XEXP (op0
, 0), alignment
);
6653 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
6654 unsignedp
, target
, ext_mode
, ext_mode
,
6656 int_size_in_bytes (TREE_TYPE (tem
)));
6658 /* If the result is a record type and BITSIZE is narrower than
6659 the mode of OP0, an integral mode, and this is a big endian
6660 machine, we must put the field into the high-order bits. */
6661 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
6662 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
6663 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
6664 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
6665 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
6669 if (mode
== BLKmode
)
6671 rtx
new = assign_stack_temp (ext_mode
,
6672 bitsize
/ BITS_PER_UNIT
, 0);
6674 emit_move_insn (new, op0
);
6675 op0
= copy_rtx (new);
6676 PUT_MODE (op0
, BLKmode
);
6677 MEM_SET_IN_STRUCT_P (op0
, 1);
6683 /* If the result is BLKmode, use that to access the object
6685 if (mode
== BLKmode
)
6688 /* Get a reference to just this component. */
6689 if (modifier
== EXPAND_CONST_ADDRESS
6690 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6691 op0
= gen_rtx_MEM (mode1
, plus_constant (XEXP (op0
, 0),
6692 (bitpos
/ BITS_PER_UNIT
)));
6694 op0
= change_address (op0
, mode1
,
6695 plus_constant (XEXP (op0
, 0),
6696 (bitpos
/ BITS_PER_UNIT
)));
6698 if (GET_CODE (op0
) == MEM
)
6699 MEM_ALIAS_SET (op0
) = get_alias_set (exp
);
6701 if (GET_CODE (XEXP (op0
, 0)) == REG
)
6702 mark_reg_pointer (XEXP (op0
, 0), alignment
);
6704 MEM_SET_IN_STRUCT_P (op0
, 1);
6705 MEM_VOLATILE_P (op0
) |= volatilep
;
6706 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
6707 || modifier
== EXPAND_CONST_ADDRESS
6708 || modifier
== EXPAND_INITIALIZER
)
6710 else if (target
== 0)
6711 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6713 convert_move (target
, op0
, unsignedp
);
6717 /* Intended for a reference to a buffer of a file-object in Pascal.
6718 But it's not certain that a special tree code will really be
6719 necessary for these. INDIRECT_REF might work for them. */
6725 /* Pascal set IN expression.
6728 rlo = set_low - (set_low%bits_per_word);
6729 the_word = set [ (index - rlo)/bits_per_word ];
6730 bit_index = index % bits_per_word;
6731 bitmask = 1 << bit_index;
6732 return !!(the_word & bitmask); */
6734 tree set
= TREE_OPERAND (exp
, 0);
6735 tree index
= TREE_OPERAND (exp
, 1);
6736 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
6737 tree set_type
= TREE_TYPE (set
);
6738 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
6739 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
6740 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
6741 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
6742 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
6743 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
6744 rtx setaddr
= XEXP (setval
, 0);
6745 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
6747 rtx diff
, quo
, rem
, addr
, bit
, result
;
6749 preexpand_calls (exp
);
6751 /* If domain is empty, answer is no. Likewise if index is constant
6752 and out of bounds. */
6753 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
6754 && TREE_CODE (set_low_bound
) == INTEGER_CST
6755 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
6756 || (TREE_CODE (index
) == INTEGER_CST
6757 && TREE_CODE (set_low_bound
) == INTEGER_CST
6758 && tree_int_cst_lt (index
, set_low_bound
))
6759 || (TREE_CODE (set_high_bound
) == INTEGER_CST
6760 && TREE_CODE (index
) == INTEGER_CST
6761 && tree_int_cst_lt (set_high_bound
, index
))))
6765 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6767 /* If we get here, we have to generate the code for both cases
6768 (in range and out of range). */
6770 op0
= gen_label_rtx ();
6771 op1
= gen_label_rtx ();
6773 if (! (GET_CODE (index_val
) == CONST_INT
6774 && GET_CODE (lo_r
) == CONST_INT
))
6776 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
6777 GET_MODE (index_val
), iunsignedp
, 0, op1
);
6780 if (! (GET_CODE (index_val
) == CONST_INT
6781 && GET_CODE (hi_r
) == CONST_INT
))
6783 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
6784 GET_MODE (index_val
), iunsignedp
, 0, op1
);
6787 /* Calculate the element number of bit zero in the first word
6789 if (GET_CODE (lo_r
) == CONST_INT
)
6790 rlow
= GEN_INT (INTVAL (lo_r
)
6791 & ~ ((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
6793 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
6794 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
6795 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
6797 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
6798 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
6800 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
6801 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
6802 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
6803 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
6805 addr
= memory_address (byte_mode
,
6806 expand_binop (index_mode
, add_optab
, diff
,
6807 setaddr
, NULL_RTX
, iunsignedp
,
6810 /* Extract the bit we want to examine */
6811 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
6812 gen_rtx_MEM (byte_mode
, addr
),
6813 make_tree (TREE_TYPE (index
), rem
),
6815 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
6816 GET_MODE (target
) == byte_mode
? target
: 0,
6817 1, OPTAB_LIB_WIDEN
);
6819 if (result
!= target
)
6820 convert_move (target
, result
, 1);
6822 /* Output the code to handle the out-of-range case. */
6825 emit_move_insn (target
, const0_rtx
);
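	/* Either way TARGET now holds the result of the set-membership test:
	   the in-range path above isolated the requested bit (byte load,
	   right shift by the bit index, AND with 1), while the out-of-range
	   path simply stored zero.  */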
6830 case WITH_CLEANUP_EXPR
:
6831 if (RTL_EXPR_RTL (exp
) == 0)
6834 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
6835 expand_decl_cleanup (NULL_TREE
, TREE_OPERAND (exp
, 2));
6837 /* That's it for this cleanup. */
6838 TREE_OPERAND (exp
, 2) = 0;
6840 return RTL_EXPR_RTL (exp
);
6842 case CLEANUP_POINT_EXPR
:
6844 /* Start a new binding layer that will keep track of all cleanup
6845 actions to be performed. */
6846 expand_start_bindings (2);
6848 target_temp_slot_level
= temp_slot_level
;
6850 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
6851 /* If we're going to use this value, load it up now. */
6853 op0
= force_not_mem (op0
);
6854 preserve_temp_slots (op0
);
6855 expand_end_bindings (NULL_TREE
, 0, 0);
6860 /* Check for a built-in function. */
6861 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
6862 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
6864 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6865 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
6867 /* If this call was expanded already by preexpand_calls,
6868 just return the result we got. */
6869 if (CALL_EXPR_RTL (exp
) != 0)
6870 return CALL_EXPR_RTL (exp
);
6872 return expand_call (exp
, target
, ignore
);
6874 case NON_LVALUE_EXPR
:
6877 case REFERENCE_EXPR
:
6878 if (TREE_CODE (type
) == UNION_TYPE
)
6880 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
6883 if (mode
!= BLKmode
)
6884 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6886 target
= assign_temp (type
, 0, 1, 1);
6889 if (GET_CODE (target
) == MEM
)
6890 /* Store data into beginning of memory target. */
6891 store_expr (TREE_OPERAND (exp
, 0),
6892 change_address (target
, TYPE_MODE (valtype
), 0), 0);
6894 else if (GET_CODE (target
) == REG
)
6895 /* Store this field into a union of the proper type. */
6896 store_field (target
, GET_MODE_BITSIZE (TYPE_MODE (valtype
)), 0,
6897 TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
6899 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp
, 0))),
6904 /* Return the entire union. */
6908 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6910 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
6913 /* If the signedness of the conversion differs and OP0 is
6914 a promoted SUBREG, clear that indication since we now
6915 have to do the proper extension. */
6916 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
6917 && GET_CODE (op0
) == SUBREG
)
6918 SUBREG_PROMOTED_VAR_P (op0
) = 0;
6923 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
6924 if (GET_MODE (op0
) == mode
)
6927 /* If OP0 is a constant, just convert it into the proper mode. */
6928 if (CONSTANT_P (op0
))
6930 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
6931 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
6933 if (modifier
== EXPAND_INITIALIZER
)
6934 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
6938 convert_to_mode (mode
, op0
,
6939 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
6941 convert_move (target
, op0
,
6942 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
6946 /* We come here from MINUS_EXPR when the second operand is a
6949 this_optab
= add_optab
;
6951 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6952 something else, make sure we add the register to the constant and
6953 then to the other thing. This case can occur during strength
6954 reduction and doing it this way will produce better code if the
6955 frame pointer or argument pointer is eliminated.
6957 fold-const.c will ensure that the constant is always in the inner
6958 PLUS_EXPR, so the only case we need to do anything about is if
6959 sp, ap, or fp is our second argument, in which case we must swap
6960 the innermost first argument and our second argument. */
6962 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
6963 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
6964 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
6965 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
6966 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
6967 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
6969 tree t
= TREE_OPERAND (exp
, 1);
6971 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6972 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
6975 /* If the result is to be ptr_mode and we are adding an integer to
6976 something, we might be forming a constant. So try to use
6977 plus_constant. If it produces a sum and we can't accept it,
6978 use force_operand. This allows P = &ARR[const] to generate
6979 efficient code on machines where a SYMBOL_REF is not a valid
6982 If this is an EXPAND_SUM call, always return the sum. */
6983 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
6984 || mode
== ptr_mode
)
6986 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
6987 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
6988 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
6992 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
6994 /* Use immed_double_const to ensure that the constant is
6995 truncated according to the mode of OP1, then sign extended
6996 to a HOST_WIDE_INT. Using the constant directly can result
6997 in non-canonical RTL in a 64x32 cross compile. */
6999 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7001 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7002 op1
= plus_constant (op1
, INTVAL (constant_part
));
7003 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7004 op1
= force_operand (op1
, target
);
7008 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7009 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7010 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7014 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7016 if (! CONSTANT_P (op0
))
7018 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7019 VOIDmode
, modifier
);
7020 /* Don't go to both_summands if modifier
7021 says it's not right to return a PLUS. */
7022 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7026 /* Use immed_double_const to ensure that the constant is
7027 truncated according to the mode of OP1, then sign extended
7028 to a HOST_WIDE_INT. Using the constant directly can result
7029 in non-canonical RTL in a 64x32 cross compile. */
7031 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7033 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7034 op0
= plus_constant (op0
, INTVAL (constant_part
));
7035 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7036 op0
= force_operand (op0
, target
);
7041 /* No sense saving up arithmetic to be done
7042 if it's all in the wrong mode to form part of an address.
7043 And force_operand won't know whether to sign-extend or
7045 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7046 || mode
!= ptr_mode
)
7049 preexpand_calls (exp
);
7050 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7053 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, ro_modifier
);
7054 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, ro_modifier
);
7057 /* Make sure any term that's a sum with a constant comes last. */
7058 if (GET_CODE (op0
) == PLUS
7059 && CONSTANT_P (XEXP (op0
, 1)))
7065 /* If adding to a sum including a constant,
7066 associate it to put the constant outside. */
7067 if (GET_CODE (op1
) == PLUS
7068 && CONSTANT_P (XEXP (op1
, 1)))
7070 rtx constant_term
= const0_rtx
;
7072 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7075 /* Ensure that MULT comes first if there is one. */
7076 else if (GET_CODE (op0
) == MULT
)
7077 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7079 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7081 /* Let's also eliminate constants from op0 if possible. */
7082 op0
= eliminate_constant_term (op0
, &constant_term
);
7084 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7085 their sum should be a constant. Form it into OP1, since the
7086 result we want will then be OP0 + OP1. */
7088 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7093 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7096 /* Put a constant term last and put a multiplication first. */
7097 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7098 temp
= op1
, op1
= op0
, op0
= temp
;
7100 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7101 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
7104 /* For initializers, we are allowed to return a MINUS of two
7105 symbolic constants. Here we handle all cases when both operands
7107 /* Handle difference of two symbolic constants,
7108 for the sake of an initializer. */
7109 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7110 && really_constant_p (TREE_OPERAND (exp
, 0))
7111 && really_constant_p (TREE_OPERAND (exp
, 1)))
7113 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
7114 VOIDmode
, ro_modifier
);
7115 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7116 VOIDmode
, ro_modifier
);
7118 /* If the last operand is a CONST_INT, use plus_constant of
7119 the negated constant. Else make the MINUS. */
7120 if (GET_CODE (op1
) == CONST_INT
)
7121 return plus_constant (op0
, - INTVAL (op1
));
7123 return gen_rtx_MINUS (mode
, op0
, op1
);
7125 /* Convert A - const to A + (-const). */
7126 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7128 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
7129 TREE_OPERAND (exp
, 1)));
7131 /* Deal with the case where we can't negate the constant
7133 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
7135 tree newtype
= signed_type (type
);
7136 tree newop0
= convert (newtype
, TREE_OPERAND (exp
, 0));
7137 tree newop1
= convert (newtype
, TREE_OPERAND (exp
, 1));
7138 tree newneg
= fold (build1 (NEGATE_EXPR
, newtype
, newop1
));
7140 if (! TREE_OVERFLOW (newneg
))
7141 return expand_expr (convert (type
,
7142 build (PLUS_EXPR
, newtype
,
7144 target
, tmode
, ro_modifier
);
7148 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
7152 this_optab
= sub_optab
;
7156 preexpand_calls (exp
);
7157 /* If first operand is constant, swap them.
7158 Thus the following special case checks need only
7159 check the second operand. */
7160 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7162 register tree t1
= TREE_OPERAND (exp
, 0);
7163 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7164 TREE_OPERAND (exp
, 1) = t1
;
7167 /* Attempt to return something suitable for generating an
7168 indexed address, for machines that support that. */
7170 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7171 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7172 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
7174 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7177 /* Apply distributive law if OP0 is x+c. */
7178 if (GET_CODE (op0
) == PLUS
7179 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
7184 (mode
, XEXP (op0
, 0),
7185 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
7186 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
7187 * INTVAL (XEXP (op0
, 1))));
7189 if (GET_CODE (op0
) != REG
)
7190 op0
= force_operand (op0
, NULL_RTX
);
7191 if (GET_CODE (op0
) != REG
)
7192 op0
= copy_to_mode_reg (mode
, op0
);
7195 gen_rtx_MULT (mode
, op0
,
7196 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
7199 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7202 /* Check for multiplying things that have been extended
7203 from a narrower type. If this machine supports multiplying
7204 in that narrower type with a result in the desired type,
7205 do it that way, and avoid the explicit type-conversion. */
7206 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7207 && TREE_CODE (type
) == INTEGER_TYPE
7208 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7209 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7210 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7211 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7212 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7213 /* Don't use a widening multiply if a shift will do. */
7214 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7215 > HOST_BITS_PER_WIDE_INT
)
7216 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7218 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7219 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7221 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7222 /* If both operands are extended, they must either both
7223 be zero-extended or both be sign-extended. */
7224 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7226 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7228 enum machine_mode innermode
7229 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7230 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7231 ? smul_widen_optab
: umul_widen_optab
);
7232 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7233 ? umul_widen_optab
: smul_widen_optab
);
7234 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7236 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7238 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7239 NULL_RTX
, VOIDmode
, 0);
7240 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7241 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7244 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7245 NULL_RTX
, VOIDmode
, 0);
7248 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7249 && innermode
== word_mode
)
7252 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7253 NULL_RTX
, VOIDmode
, 0);
7254 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7255 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7258 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7259 NULL_RTX
, VOIDmode
, 0);
7260 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7261 unsignedp
, OPTAB_LIB_WIDEN
);
7262 htem
= expand_mult_highpart_adjust (innermode
,
7263 gen_highpart (innermode
, temp
),
7265 gen_highpart (innermode
, temp
),
7267 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7272 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7273 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7274 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7276 case TRUNC_DIV_EXPR
:
7277 case FLOOR_DIV_EXPR
:
7279 case ROUND_DIV_EXPR
:
7280 case EXACT_DIV_EXPR
:
7281 preexpand_calls (exp
);
7282 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7284 /* Possible optimization: compute the dividend with EXPAND_SUM
7285 then if the divisor is constant can optimize the case
7286 where some terms of the dividend have coeffs divisible by it. */
7287 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7288 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7289 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7292 this_optab
= flodiv_optab
;
7295 case TRUNC_MOD_EXPR
:
7296 case FLOOR_MOD_EXPR
:
7298 case ROUND_MOD_EXPR
:
7299 preexpand_calls (exp
);
7300 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7302 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7303 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7304 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
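      /* Note: the first argument of expand_divmod selects which result is
	 produced -- 0 for the quotient (the *_DIV_EXPR cases above), 1 for
	 the remainder (the *_MOD_EXPR cases here).  */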
7306 case FIX_ROUND_EXPR
:
7307 case FIX_FLOOR_EXPR
:
7309 abort (); /* Not used for C. */
7311 case FIX_TRUNC_EXPR
:
7312 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7314 target
= gen_reg_rtx (mode
);
7315 expand_fix (target
, op0
, unsignedp
);
7319 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7321 target
= gen_reg_rtx (mode
);
7322 /* expand_float can't figure out what to do if FROM has VOIDmode.
7323 So give it the correct mode. With -O, cse will optimize this. */
7324 if (GET_MODE (op0
) == VOIDmode
)
7325 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7327 expand_float (target
, op0
,
7328 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7332 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7333 temp
= expand_unop (mode
, neg_optab
, op0
, target
, 0);
7339 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7341 /* Handle complex values specially. */
7342 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
7343 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
7344 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
7346 /* Unsigned abs is simply the operand. Testing here means we don't
7347 risk generating incorrect code below. */
7348 if (TREE_UNSIGNED (type
))
7351 return expand_abs (mode
, op0
, target
,
7352 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
7356 target
= original_target
;
7357 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
7358 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
7359 || GET_MODE (target
) != mode
7360 || (GET_CODE (target
) == REG
7361 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
7362 target
= gen_reg_rtx (mode
);
7363 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7364 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7366 /* First try to do it with a special MIN or MAX instruction.
7367 If that does not win, use a conditional jump to select the proper
7369 this_optab
= (TREE_UNSIGNED (type
)
7370 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
7371 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
7373 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
7378 /* At this point, a MEM target is no longer useful; we will get better
7381 if (GET_CODE (target
) == MEM
)
7382 target
= gen_reg_rtx (mode
);
7385 emit_move_insn (target
, op0
);
7387 op0
= gen_label_rtx ();
7389 /* If this mode is an integer too wide to compare properly,
7390 compare word by word. Rely on cse to optimize constant cases. */
7391 if (GET_MODE_CLASS (mode
) == MODE_INT
&& ! can_compare_p (mode
, ccp_jump
))
7393 if (code
== MAX_EXPR
)
7394 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7395 target
, op1
, NULL_RTX
, op0
);
7397 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7398 op1
, target
, NULL_RTX
, op0
);
7402 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7403 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
7404 unsignedp
, mode
, NULL_RTX
, 0, NULL_RTX
,
7407 emit_move_insn (target
, op1
);
7412 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7413 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
7419 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7420 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
7425 /* ??? Can optimize bitwise operations with one arg constant.
7426 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7427 and (a bitwise1 b) bitwise2 b (etc)
7428 but that is probably not worth while. */
7430 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7431 boolean values when we want in all cases to compute both of them. In
7432 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7433 as actual zero-or-1 values and then bitwise anding. In cases where
7434 there cannot be any side effects, better code would be made by
7435 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7436 how to recognize those cases. */
7438 case TRUTH_AND_EXPR
:
7440 this_optab
= and_optab
;
7445 this_optab
= ior_optab
;
7448 case TRUTH_XOR_EXPR
:
7450 this_optab
= xor_optab
;
7457 preexpand_calls (exp
);
7458 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7460 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7461 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
7464 /* Could determine the answer when only additive constants differ. Also,
7465 the addition of one can be handled by changing the condition. */
7472 preexpand_calls (exp
);
7473 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
7477 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7478 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
7480 && GET_CODE (original_target
) == REG
7481 && (GET_MODE (original_target
)
7482 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7484 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
7487 if (temp
!= original_target
)
7488 temp
= copy_to_reg (temp
);
7490 op1
= gen_label_rtx ();
7491 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
7492 GET_MODE (temp
), unsignedp
, 0, op1
);
7493 emit_move_insn (temp
, const1_rtx
);
7498 /* If no set-flag instruction, must generate a conditional
7499 store into a temporary variable. Drop through
7500 and handle this like && and ||. */
7502 case TRUTH_ANDIF_EXPR
:
7503 case TRUTH_ORIF_EXPR
:
7505 && (target
== 0 || ! safe_from_p (target
, exp
, 1)
7506 /* Make sure we don't have a hard reg (such as function's return
7507 value) live across basic blocks, if not optimizing. */
7508 || (!optimize
&& GET_CODE (target
) == REG
7509 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
7510 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7513 emit_clr_insn (target
);
7515 op1
= gen_label_rtx ();
7516 jumpifnot (exp
, op1
);
7519 emit_0_to_1_insn (target
);
7522 return ignore
? const0_rtx
: target
;
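      /* The short-circuit forms are thus expanded with control flow rather
	 than as data operations: clear TARGET, let jumpifnot evaluate the
	 condition (skipping the second operand when the first already
	 decides the result), and store 1 only on the path where the whole
	 condition held.  */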
7524 case TRUTH_NOT_EXPR
:
7525 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7526 /* The parser is careful to generate TRUTH_NOT_EXPR
7527 only with operands that are always zero or one. */
7528 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
7529 target
, 1, OPTAB_LIB_WIDEN
);
7535 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
7537 return expand_expr (TREE_OPERAND (exp
, 1),
7538 (ignore
? const0_rtx
: target
),
7542 /* If we would have a "singleton" (see below) were it not for a
7543 conversion in each arm, bring that conversion back out. */
7544 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7545 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
7546 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
7547 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
7549 tree
true = TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
7550 tree
false = TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
7552 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7553 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7554 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7555 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7556 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7557 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7558 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7559 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7560 return expand_expr (build1 (NOP_EXPR
, type
,
7561 build (COND_EXPR
, TREE_TYPE (true),
7562 TREE_OPERAND (exp
, 0),
7564 target
, tmode
, modifier
);
7568 /* Note that COND_EXPRs whose type is a structure or union
7569 are required to be constructed to contain assignments of
7570 a temporary variable, so that we can evaluate them here
7571 for side effect only. If type is void, we must do likewise. */
7573 /* If an arm of the branch requires a cleanup,
7574 only that cleanup is performed. */
7577 tree binary_op
= 0, unary_op
= 0;
7579 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7580 convert it to our mode, if necessary. */
7581 if (integer_onep (TREE_OPERAND (exp
, 1))
7582 && integer_zerop (TREE_OPERAND (exp
, 2))
7583 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
7587 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
7592 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, ro_modifier
);
7593 if (GET_MODE (op0
) == mode
)
7597 target
= gen_reg_rtx (mode
);
7598 convert_move (target
, op0
, unsignedp
);
7602 /* Check for X ? A + B : A. If we have this, we can copy A to the
7603 output and conditionally add B. Similarly for unary operations.
7604 Don't do this if X has side-effects because those side effects
7605 might affect A or B and the "?" operation is a sequence point in
7606 ANSI. (operand_equal_p tests for side effects.) */
7608 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
7609 && operand_equal_p (TREE_OPERAND (exp
, 2),
7610 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
7611 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
7612 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
7613 && operand_equal_p (TREE_OPERAND (exp
, 1),
7614 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
7615 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
7616 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
7617 && operand_equal_p (TREE_OPERAND (exp
, 2),
7618 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
7619 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
7620 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
7621 && operand_equal_p (TREE_OPERAND (exp
, 1),
7622 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
7623 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
7625 /* If we are not to produce a result, we have no target. Otherwise,
7626 if a target was specified use it; it will not be used as an
7627 intermediate target unless it is safe. If no target, use a
7632 else if (original_target
7633 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
7634 || (singleton
&& GET_CODE (original_target
) == REG
7635 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
7636 && original_target
== var_rtx (singleton
)))
7637 && GET_MODE (original_target
) == mode
7638 #ifdef HAVE_conditional_move
7639 && (! can_conditionally_move_p (mode
)
7640 || GET_CODE (original_target
) == REG
7641 || TREE_ADDRESSABLE (type
))
7643 && ! (GET_CODE (original_target
) == MEM
7644 && MEM_VOLATILE_P (original_target
)))
7645 temp
= original_target
;
7646 else if (TREE_ADDRESSABLE (type
))
7649 temp
= assign_temp (type
, 0, 0, 1);
7651 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7652 do the test of X as a store-flag operation, do this as
7653 A + ((X != 0) << log C). Similarly for other simple binary
7654 operators. Only do for C == 1 if BRANCH_COST is low. */
7655 if (temp
&& singleton
&& binary_op
7656 && (TREE_CODE (binary_op
) == PLUS_EXPR
7657 || TREE_CODE (binary_op
) == MINUS_EXPR
7658 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
7659 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
7660 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
7661 : integer_onep (TREE_OPERAND (binary_op
, 1)))
7662 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
7665 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
7666 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
7667 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
7670 /* If we had X ? A : A + 1, do this as A + (X == 0).
7672 We have to invert the truth value here and then put it
7673 back later if do_store_flag fails. We cannot simply copy
7674 TREE_OPERAND (exp, 0) to another variable and modify that
7675 because invert_truthvalue can modify the tree pointed to
7677 if (singleton
== TREE_OPERAND (exp
, 1))
7678 TREE_OPERAND (exp
, 0)
7679 = invert_truthvalue (TREE_OPERAND (exp
, 0));
7681 result
= do_store_flag (TREE_OPERAND (exp
, 0),
7682 (safe_from_p (temp
, singleton
, 1)
7684 mode
, BRANCH_COST
<= 1);
7686 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
7687 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
7688 build_int_2 (tree_log2
7692 (safe_from_p (temp
, singleton
, 1)
7693 ? temp
: NULL_RTX
), 0);
7697 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
7698 return expand_binop (mode
, boptab
, op1
, result
, temp
,
7699 unsignedp
, OPTAB_LIB_WIDEN
);
7701 else if (singleton
== TREE_OPERAND (exp
, 1))
7702 TREE_OPERAND (exp
, 0)
7703 = invert_truthvalue (TREE_OPERAND (exp
, 0));
7706 do_pending_stack_adjust ();
7708 op0
= gen_label_rtx ();
7710 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
7714 /* If the target conflicts with the other operand of the
7715 binary op, we can't use it. Also, we can't use the target
7716 if it is a hard register, because evaluating the condition
7717 might clobber it. */
7719 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
7720 || (GET_CODE (temp
) == REG
7721 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
7722 temp
= gen_reg_rtx (mode
);
7723 store_expr (singleton
, temp
, 0);
7726 expand_expr (singleton
,
7727 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7728 if (singleton
== TREE_OPERAND (exp
, 1))
7729 jumpif (TREE_OPERAND (exp
, 0), op0
);
7731 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7733 start_cleanup_deferral ();
7734 if (binary_op
&& temp
== 0)
7735 /* Just touch the other operand. */
7736 expand_expr (TREE_OPERAND (binary_op
, 1),
7737 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7739 store_expr (build (TREE_CODE (binary_op
), type
,
7740 make_tree (type
, temp
),
7741 TREE_OPERAND (binary_op
, 1)),
7744 store_expr (build1 (TREE_CODE (unary_op
), type
,
7745 make_tree (type
, temp
)),
7749 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7750 comparison operator. If we have one of these cases, set the
7751 output to A, branch on A (cse will merge these two references),
7752 then set the output to FOO. */
7754 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
7755 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
7756 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7757 TREE_OPERAND (exp
, 1), 0)
7758 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
7759 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
7760 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
7762 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
7763 temp
= gen_reg_rtx (mode
);
7764 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
7765 jumpif (TREE_OPERAND (exp
, 0), op0
);
7767 start_cleanup_deferral ();
7768 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
7772 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
7773 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
7774 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7775 TREE_OPERAND (exp
, 2), 0)
7776 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
7777 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
7778 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
7780 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
7781 temp
= gen_reg_rtx (mode
);
7782 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
7783 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7785 start_cleanup_deferral ();
7786 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
7791 op1
= gen_label_rtx ();
7792 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7794 start_cleanup_deferral ();
7796 /* One branch of the cond can be void, if it never returns. For
7797 example A ? throw : E */
7799 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
7800 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
7802 expand_expr (TREE_OPERAND (exp
, 1),
7803 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7804 end_cleanup_deferral ();
7806 emit_jump_insn (gen_jump (op1
));
7809 start_cleanup_deferral ();
7811 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
7812 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
7814 expand_expr (TREE_OPERAND (exp
, 2),
7815 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7818 end_cleanup_deferral ();
7829 /* Something needs to be initialized, but we didn't know
7830 where that thing was when building the tree. For example,
7831 it could be the return value of a function, or a parameter
7832 to a function which lays down in the stack, or a temporary
7833 variable which must be passed by reference.
7835 We guarantee that the expression will either be constructed
7836 or copied into our original target. */
7838 tree slot
= TREE_OPERAND (exp
, 0);
7839 tree cleanups
= NULL_TREE
;
7842 if (TREE_CODE (slot
) != VAR_DECL
)
7846 target
= original_target
;
7850 if (DECL_RTL (slot
) != 0)
7852 target
= DECL_RTL (slot
);
7853 /* If we have already expanded the slot, so don't do
7855 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
7860 target
= assign_temp (type
, 2, 0, 1);
7861 /* All temp slots at this level must not conflict. */
7862 preserve_temp_slots (target
);
7863 DECL_RTL (slot
) = target
;
7864 if (TREE_ADDRESSABLE (slot
))
7866 TREE_ADDRESSABLE (slot
) = 0;
7867 mark_addressable (slot
);
	  /* Since SLOT is not known to the called function
	     to belong to its stack frame, we must build an explicit
	     cleanup.  This case occurs when we must build up a reference
	     to pass the reference as an argument.  In this case,
	     it is very likely that such a reference need not be
	     built here.  */
7877 if (TREE_OPERAND (exp
, 2) == 0)
7878 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
7879 cleanups
= TREE_OPERAND (exp
, 2);
	  /* This case does occur, when expanding a parameter which
	     needs to be constructed on the stack.  The target
	     is the actual stack address that we want to initialize.
	     The function we call will perform the cleanup in this case.  */

	  /* If we have already assigned it space, use that space,
	     not the target that we were passed in, as our target
	     parameter is only a hint.  */
7892 if (DECL_RTL (slot
) != 0)
7894 target
= DECL_RTL (slot
);
	  /* If we have already expanded the slot, don't do it again.  */
7897 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
7902 DECL_RTL (slot
) = target
;
	  /* If we must have an addressable slot, then make sure that
	     the RTL that we just stored in slot is OK.  */
7905 if (TREE_ADDRESSABLE (slot
))
7907 TREE_ADDRESSABLE (slot
) = 0;
7908 mark_addressable (slot
);
7913 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
7914 /* Mark it as expanded. */
7915 TREE_OPERAND (exp
, 1) = NULL_TREE
;
7917 TREE_USED (slot
) = 1;
7918 store_expr (exp1
, target
, 0);
7920 expand_decl_cleanup (NULL_TREE
, cleanups
);
7927 tree lhs
= TREE_OPERAND (exp
, 0);
7928 tree rhs
= TREE_OPERAND (exp
, 1);
7929 tree noncopied_parts
= 0;
7930 tree lhs_type
= TREE_TYPE (lhs
);
7932 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
7933 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
7934 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
7935 TYPE_NONCOPIED_PARTS (lhs_type
));
7936 while (noncopied_parts
!= 0)
7938 expand_assignment (TREE_VALUE (noncopied_parts
),
7939 TREE_PURPOSE (noncopied_parts
), 0, 0);
7940 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
      /* If lhs is complex, expand calls in rhs before computing it.
	 That's so we don't compute a pointer and save it over a call.
	 If lhs is simple, compute it first so we can give it as a
	 target if the rhs is just a call.  This avoids an extra temp and copy
	 and that prevents a partial-subsumption which makes bad code.
	 Actually we could treat component_ref's of vars like vars.  */
7954 tree lhs
= TREE_OPERAND (exp
, 0);
7955 tree rhs
= TREE_OPERAND (exp
, 1);
7956 tree noncopied_parts
= 0;
7957 tree lhs_type
= TREE_TYPE (lhs
);
7961 if (TREE_CODE (lhs
) != VAR_DECL
7962 && TREE_CODE (lhs
) != RESULT_DECL
7963 && TREE_CODE (lhs
) != PARM_DECL
7964 && ! (TREE_CODE (lhs
) == INDIRECT_REF
7965 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs
, 0)))))
7966 preexpand_calls (exp
);
      /* Check for |= or &= of a bitfield of size one into another bitfield
	 of size 1.  In this case, (unless we need the result of the
	 assignment) we can do this more efficiently with a
	 test followed by an assignment, if necessary.

	 ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	 things change so we do, this code should be enhanced to
	 support it.  */
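      /* For illustration, with hypothetical declarations
	     struct { unsigned a : 1, b : 1; } s, t;
	 the assignment "s.a |= t.b" can be emitted as
	     if (t.b) s.a = 1;
	 rather than extracting both bits, OR-ing them and storing back.  */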
7977 && TREE_CODE (lhs
) == COMPONENT_REF
7978 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
7979 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
7980 && TREE_OPERAND (rhs
, 0) == lhs
7981 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
7982 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs
, 1))) == 1
7983 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))) == 1)
7985 rtx label
= gen_label_rtx ();
7987 do_jump (TREE_OPERAND (rhs
, 1),
7988 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
7989 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
7990 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
7991 (TREE_CODE (rhs
) == BIT_IOR_EXPR
7993 : integer_zero_node
)),
7995 do_pending_stack_adjust ();
8000 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
8001 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
8002 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
8003 TYPE_NONCOPIED_PARTS (lhs_type
));
8005 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8006 while (noncopied_parts
!= 0)
8008 expand_assignment (TREE_PURPOSE (noncopied_parts
),
8009 TREE_VALUE (noncopied_parts
), 0, 0);
8010 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8016 if (!TREE_OPERAND (exp
, 0))
8017 expand_null_return ();
8019 expand_return (TREE_OPERAND (exp
, 0));
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
8032 /* If nonzero, TEMP will be set to the address of something that might
8033 be a MEM corresponding to a stack slot. */
8036 /* Are we taking the address of a nested function? */
8037 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8038 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8039 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8040 && ! TREE_STATIC (exp
))
8042 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8043 op0
= force_operand (op0
, target
);
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
8047 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8051 /* We make sure to pass const0_rtx down if we came in with
8052 ignore set, to avoid doing the cleanups twice for something. */
8053 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8054 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8055 (modifier
== EXPAND_INITIALIZER
8056 ? modifier
: EXPAND_CONST_ADDRESS
));
8058 /* If we are going to ignore the result, OP0 will have been set
8059 to const0_rtx, so just return it. Don't get confused and
8060 think we are taking the address of the constant. */
8064 op0
= protect_from_queue (op0
, 0);
8066 /* We would like the object in memory. If it is a constant, we can
8067 have it be statically allocated into memory. For a non-constant,
8068 we need to allocate some memory and store the value into it. */
8070 if (CONSTANT_P (op0
))
8071 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8073 else if (GET_CODE (op0
) == MEM
)
8075 mark_temp_addr_taken (op0
);
8076 temp
= XEXP (op0
, 0);
8079 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8080 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
	  /* If this object is in a register, it must be copied into
	     memory before we can take its address.  */
8084 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8085 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8087 mark_temp_addr_taken (memloc
);
8088 emit_move_insn (memloc
, op0
);
8092 if (GET_CODE (op0
) != MEM
)
8095 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8097 temp
= XEXP (op0
, 0);
8098 #ifdef POINTERS_EXTEND_UNSIGNED
8099 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
8100 && mode
== ptr_mode
)
8101 temp
= convert_memory_address (ptr_mode
, temp
);
8106 op0
= force_operand (XEXP (op0
, 0), target
);
8109 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
8110 op0
= force_reg (Pmode
, op0
);
8112 if (GET_CODE (op0
) == REG
8113 && ! REG_USERVAR_P (op0
))
8114 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)) / BITS_PER_UNIT
);
      /* If we might have had a temp slot, add an equivalent address
	 for it.  */
8119 update_temp_slot_address (temp
, op0
);
8121 #ifdef POINTERS_EXTEND_UNSIGNED
8122 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8123 && mode
== ptr_mode
)
8124 op0
= convert_memory_address (ptr_mode
, op0
);
8129 case ENTRY_VALUE_EXPR
:
8132 /* COMPLEX type for Extended Pascal & Fortran */
8135 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8138 /* Get the rtx code of the operands. */
8139 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8140 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8143 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8147 /* Move the real (op0) and imaginary (op1) parts to their location. */
8148 emit_move_insn (gen_realpart (mode
, target
), op0
);
8149 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8151 insns
= get_insns ();
8154 /* Complex construction should appear as a single unit. */
8155 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8156 each with a separate pseudo as destination.
8157 It's not correct for flow to treat them as a unit. */
8158 if (GET_CODE (target
) != CONCAT
)
8159 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8167 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8168 return gen_realpart (mode
, op0
);
8171 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8172 return gen_imagpart (mode
, op0
);
8176 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8180 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8183 target
= gen_reg_rtx (mode
);
8187 /* Store the realpart and the negated imagpart to target. */
8188 emit_move_insn (gen_realpart (partmode
, target
),
8189 gen_realpart (partmode
, op0
));
8191 imag_t
= gen_imagpart (partmode
, target
);
8192 temp
= expand_unop (partmode
, neg_optab
,
8193 gen_imagpart (partmode
, op0
), imag_t
, 0);
8195 emit_move_insn (imag_t
, temp
);
8197 insns
= get_insns ();
8200 /* Conjugate should appear as a single unit
8201 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8202 each with a separate pseudo as destination.
8203 It's not correct for flow to treat them as a unit. */
8204 if (GET_CODE (target
) != CONCAT
)
8205 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8212 case TRY_CATCH_EXPR
:
8214 tree handler
= TREE_OPERAND (exp
, 1);
8216 expand_eh_region_start ();
8218 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8220 expand_eh_region_end (handler
);
8225 case TRY_FINALLY_EXPR
:
8227 tree try_block
= TREE_OPERAND (exp
, 0);
8228 tree finally_block
= TREE_OPERAND (exp
, 1);
8229 rtx finally_label
= gen_label_rtx ();
8230 rtx done_label
= gen_label_rtx ();
8231 rtx return_link
= gen_reg_rtx (Pmode
);
8232 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8233 (tree
) finally_label
, (tree
) return_link
);
8234 TREE_SIDE_EFFECTS (cleanup
) = 1;
8236 /* Start a new binding layer that will keep track of all cleanup
8237 actions to be performed. */
8238 expand_start_bindings (2);
8240 target_temp_slot_level
= temp_slot_level
;
8242 expand_decl_cleanup (NULL_TREE
, cleanup
);
8243 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8245 preserve_temp_slots (op0
);
8246 expand_end_bindings (NULL_TREE
, 0, 0);
8247 emit_jump (done_label
);
8248 emit_label (finally_label
);
8249 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8250 emit_indirect_jump (return_link
);
8251 emit_label (done_label
);
8255 case GOTO_SUBROUTINE_EXPR
:
8257 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8258 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8259 rtx return_address
= gen_label_rtx ();
8260 emit_move_insn (return_link
, gen_rtx_LABEL_REF (Pmode
, return_address
));
8262 emit_label (return_address
);
8268 rtx dcc
= get_dynamic_cleanup_chain ();
8269 emit_move_insn (dcc
, validize_mem (gen_rtx_MEM (Pmode
, dcc
)));
8275 rtx dhc
= get_dynamic_handler_chain ();
8276 emit_move_insn (dhc
, validize_mem (gen_rtx_MEM (Pmode
, dhc
)));
8281 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8284 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
8287 /* Here to do an ordinary binary operator, generating an instruction
8288 from the optab already placed in `this_optab'. */
8290 preexpand_calls (exp
);
8291 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8293 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8294 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8296 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
8297 unsignedp
, OPTAB_LIB_WIDEN
);
8303 /* Return the tree node and offset if a given argument corresponds to
8304 a string constant. */
8307 string_constant (arg
, ptr_offset
)
8313 if (TREE_CODE (arg
) == ADDR_EXPR
8314 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
8316 *ptr_offset
= integer_zero_node
;
8317 return TREE_OPERAND (arg
, 0);
8319 else if (TREE_CODE (arg
) == PLUS_EXPR
)
8321 tree arg0
= TREE_OPERAND (arg
, 0);
8322 tree arg1
= TREE_OPERAND (arg
, 1);
8327 if (TREE_CODE (arg0
) == ADDR_EXPR
8328 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
8331 return TREE_OPERAND (arg0
, 0);
8333 else if (TREE_CODE (arg1
) == ADDR_EXPR
8334 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
8337 return TREE_OPERAND (arg1
, 0);
8344 /* Expand code for a post- or pre- increment or decrement
8345 and return the RTX for the result.
8346 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8349 expand_increment (exp
, post
, ignore
)
8353 register rtx op0
, op1
;
8354 register rtx temp
, value
;
8355 register tree incremented
= TREE_OPERAND (exp
, 0);
8356 optab this_optab
= add_optab
;
8358 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
8359 int op0_is_copy
= 0;
8360 int single_insn
= 0;
8361 /* 1 means we can't store into OP0 directly,
8362 because it is a subreg narrower than a word,
8363 and we don't dare clobber the rest of the word. */
8366 /* Stabilize any component ref that might need to be
8367 evaluated more than once below. */
8369 || TREE_CODE (incremented
) == BIT_FIELD_REF
8370 || (TREE_CODE (incremented
) == COMPONENT_REF
8371 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
8372 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
8373 incremented
= stabilize_reference (incremented
);
8374 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8375 ones into save exprs so that they don't accidentally get evaluated
8376 more than once by the code below. */
8377 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
8378 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
8379 incremented
= save_expr (incremented
);
8381 /* Compute the operands as RTX.
8382 Note whether OP0 is the actual lvalue or a copy of it:
8383 I believe it is a copy iff it is a register or subreg
8384 and insns were generated in computing it. */
8386 temp
= get_last_insn ();
8387 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_RW
);
  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
8397 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
8400 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
8404 else if (GET_CODE (op0
) == SUBREG
8405 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
8407 /* We cannot increment this SUBREG in place. If we are
8408 post-incrementing, get a copy of the old value. Otherwise,
8409 just mark that we cannot increment in place. */
8411 op0
= copy_to_reg (op0
);
8416 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
8417 && temp
!= get_last_insn ());
8418 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
8419 EXPAND_MEMORY_USE_BAD
);
8421 /* Decide whether incrementing or decrementing. */
8422 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
8423 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
8424 this_optab
= sub_optab
;
8426 /* Convert decrement by a constant into a negative increment. */
8427 if (this_optab
== sub_optab
8428 && GET_CODE (op1
) == CONST_INT
)
8430 op1
= GEN_INT (- INTVAL (op1
));
8431 this_optab
= add_optab
;
8434 /* For a preincrement, see if we can do this with a single instruction. */
8437 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
8438 if (icode
!= (int) CODE_FOR_nothing
8439 /* Make sure that OP0 is valid for operands 0 and 1
8440 of the insn we want to queue. */
8441 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
8442 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
8443 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
8447 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8448 then we cannot just increment OP0. We must therefore contrive to
8449 increment the original value. Then, for postincrement, we can return
8450 OP0 since it is a copy of the old value. For preincrement, expand here
8451 unless we can do it with a single insn.
8453 Likewise if storing directly into OP0 would clobber high bits
8454 we need to preserve (bad_subreg). */
8455 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
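      /* For illustration: a hypothetical "p->count++" is handled here by
	 building the equivalent assignment "p->count = p->count + 1" and
	 expanding that; for a postincrement the copy already expanded
	 into OP0 supplies the old value.  */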
8463 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
8464 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
8465 ? MINUS_EXPR
: PLUS_EXPR
),
8468 TREE_OPERAND (exp
, 1));
8470 while (TREE_CODE (incremented
) == NOP_EXPR
8471 || TREE_CODE (incremented
) == CONVERT_EXPR
)
8473 newexp
= convert (TREE_TYPE (incremented
), newexp
);
8474 incremented
= TREE_OPERAND (incremented
, 0);
8477 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
8478 return post
? op0
: temp
;
8483 /* We have a true reference to the value in OP0.
8484 If there is an insn to add or subtract in this mode, queue it.
8485 Queueing the increment insn avoids the register shuffling
8486 that often results if we must increment now and first save
8487 the old value for subsequent use. */
8489 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8490 op0
= stabilize (op0
);
8493 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
8494 if (icode
!= (int) CODE_FOR_nothing
8495 /* Make sure that OP0 is valid for operands 0 and 1
8496 of the insn we want to queue. */
8497 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
8498 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
8500 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
8501 op1
= force_reg (mode
, op1
);
8503 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
8505 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
8507 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
8508 ? force_reg (Pmode
, XEXP (op0
, 0))
8509 : copy_to_reg (XEXP (op0
, 0)));
8512 op0
= change_address (op0
, VOIDmode
, addr
);
8513 temp
= force_reg (GET_MODE (op0
), op0
);
8514 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
8515 op1
= force_reg (mode
, op1
);
8517 /* The increment queue is LIFO, thus we have to `queue'
8518 the instructions in reverse order. */
8519 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
8520 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
8525 /* Preincrement, or we can't increment with one simple insn. */
8527 /* Save a copy of the value before inc or dec, to return it later. */
8528 temp
= value
= copy_to_reg (op0
);
8530 /* Arrange to return the incremented value. */
8531 /* Copy the rtx because expand_binop will protect from the queue,
8532 and the results of that would be invalid for us to return
8533 if our caller does emit_queue before using our result. */
8534 temp
= copy_rtx (value
= op0
);
8536 /* Increment however we can. */
8537 op1
= expand_binop (mode
, this_optab
, value
, op1
,
8538 current_function_check_memory_usage
? NULL_RTX
: op0
,
8539 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
8540 /* Make sure the value is stored into OP0. */
8542 emit_move_insn (op0
, op1
);
8547 /* Expand all function calls contained within EXP, innermost ones first.
8548 But don't look within expressions that have sequence points.
8549 For each CALL_EXPR, record the rtx for its value
8550 in the CALL_EXPR_RTL field. */
8553 preexpand_calls (exp
)
8556 register int nops
, i
;
8557 int type
= TREE_CODE_CLASS (TREE_CODE (exp
));
8559 if (! do_preexpand_calls
)
8562 /* Only expressions and references can contain calls. */
8564 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r')
8567 switch (TREE_CODE (exp
))
8570 /* Do nothing if already expanded. */
8571 if (CALL_EXPR_RTL (exp
) != 0
8572 /* Do nothing if the call returns a variable-sized object. */
8573 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp
))) != INTEGER_CST
8574 /* Do nothing to built-in functions. */
8575 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
8576 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
8578 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
8581 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
8586 case TRUTH_ANDIF_EXPR
:
8587 case TRUTH_ORIF_EXPR
:
8588 /* If we find one of these, then we can be sure
8589 the adjust will be done for it (since it makes jumps).
8590 Do it now, so that if this is inside an argument
8591 of a function, we don't get the stack adjustment
8592 after some other args have already been pushed. */
8593 do_pending_stack_adjust ();
8598 case WITH_CLEANUP_EXPR
:
8599 case CLEANUP_POINT_EXPR
:
8600 case TRY_CATCH_EXPR
:
8604 if (SAVE_EXPR_RTL (exp
) != 0)
8611 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
8612 for (i
= 0; i
< nops
; i
++)
8613 if (TREE_OPERAND (exp
, i
) != 0)
8615 if (TREE_CODE (exp
) == TARGET_EXPR
&& i
== 2)
8616 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
8617 It doesn't happen before the call is made. */
8621 type
= TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
8622 if (type
== 'e' || type
== '<' || type
== '1' || type
== '2'
8624 preexpand_calls (TREE_OPERAND (exp
, i
));
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
8670 /* Expand conditional expressions. */
8672 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8673 LABEL is an rtx of code CODE_LABEL, in this function and all the
8677 jumpifnot (exp
, label
)
8681 do_jump (exp
, label
, NULL_RTX
);
8684 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8691 do_jump (exp
, NULL_RTX
, label
);
8694 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8695 the result is zero, or IF_TRUE_LABEL if the result is one.
8696 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8697 meaning fall through in that case.
8699 do_jump always does any pending stack adjust except when it does not
8700 actually perform a jump. An example where there is no jump
8701 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8703 This function is responsible for optimizing cases such as
8704 &&, || and comparison operators in EXP. */
8707 do_jump (exp
, if_false_label
, if_true_label
)
8709 rtx if_false_label
, if_true_label
;
8711 register enum tree_code code
= TREE_CODE (exp
);
8712 /* Some cases need to create a label to jump to
8713 in order to properly fall through.
8714 These cases set DROP_THROUGH_LABEL nonzero. */
8715 rtx drop_through_label
= 0;
8719 enum machine_mode mode
;
8721 #ifdef MAX_INTEGER_COMPUTATION_MODE
8722 check_max_integer_computation_mode (exp
);
8733 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
8739 /* This is not true with #pragma weak */
8741 /* The address of something can never be zero. */
8743 emit_jump (if_true_label
);
8748 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
8749 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
8750 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
8755 if ((TYPE_PRECISION (TREE_TYPE (exp
))
8756 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8758 case NON_LVALUE_EXPR
:
8759 case REFERENCE_EXPR
:
8764 /* These cannot change zero->non-zero or vice versa. */
8765 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
8769 /* This is never less insns than evaluating the PLUS_EXPR followed by
8770 a test and can be longer if the test is eliminated. */
8772 /* Reduce to minus. */
8773 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
8774 TREE_OPERAND (exp
, 0),
8775 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
8776 TREE_OPERAND (exp
, 1))));
8777 /* Process as MINUS. */
8781 /* Non-zero iff operands of minus differ. */
8782 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
8783 TREE_OPERAND (exp
, 0),
8784 TREE_OPERAND (exp
, 1)),
8785 NE
, NE
, if_false_label
, if_true_label
);
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
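      /* For illustration, with a hypothetical 32-bit X: a jump on
	 "x & 0x80" only needs the low byte, so the conversion below lets
	 the test be done as a QImode comparison instead of a full-width
	 one.  */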
8796 if (! SLOW_BYTE_ACCESS
8797 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
8798 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
8799 && (i
= floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))) >= 0
8800 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
8801 && (type
= type_for_mode (mode
, 1)) != 0
8802 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
8803 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
8804 != CODE_FOR_nothing
))
8806 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
8811 case TRUTH_NOT_EXPR
:
8812 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
8815 case TRUTH_ANDIF_EXPR
:
8816 if (if_false_label
== 0)
8817 if_false_label
= drop_through_label
= gen_label_rtx ();
8818 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
8819 start_cleanup_deferral ();
8820 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
8821 end_cleanup_deferral ();
8824 case TRUTH_ORIF_EXPR
:
8825 if (if_true_label
== 0)
8826 if_true_label
= drop_through_label
= gen_label_rtx ();
8827 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
8828 start_cleanup_deferral ();
8829 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
8830 end_cleanup_deferral ();
8835 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8836 preserve_temp_slots (NULL_RTX
);
8840 do_pending_stack_adjust ();
8841 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
8848 int bitsize
, bitpos
, unsignedp
;
8849 enum machine_mode mode
;
8855 /* Get description of this reference. We don't actually care
8856 about the underlying object here. */
8857 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
8858 &mode
, &unsignedp
, &volatilep
,
8861 type
= type_for_size (bitsize
, unsignedp
);
8862 if (! SLOW_BYTE_ACCESS
8863 && type
!= 0 && bitsize
>= 0
8864 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
8865 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
8866 != CODE_FOR_nothing
))
8868 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
8875 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8876 if (integer_onep (TREE_OPERAND (exp
, 1))
8877 && integer_zerop (TREE_OPERAND (exp
, 2)))
8878 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
8880 else if (integer_zerop (TREE_OPERAND (exp
, 1))
8881 && integer_onep (TREE_OPERAND (exp
, 2)))
8882 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
8886 register rtx label1
= gen_label_rtx ();
8887 drop_through_label
= gen_label_rtx ();
8889 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
8891 start_cleanup_deferral ();
8892 /* Now the THEN-expression. */
8893 do_jump (TREE_OPERAND (exp
, 1),
8894 if_false_label
? if_false_label
: drop_through_label
,
8895 if_true_label
? if_true_label
: drop_through_label
);
8896 /* In case the do_jump just above never jumps. */
8897 do_pending_stack_adjust ();
8898 emit_label (label1
);
8900 /* Now the ELSE-expression. */
8901 do_jump (TREE_OPERAND (exp
, 2),
8902 if_false_label
? if_false_label
: drop_through_label
,
8903 if_true_label
? if_true_label
: drop_through_label
);
8904 end_cleanup_deferral ();
8910 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8912 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
8913 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
8915 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
8916 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
8919 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
8920 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
8921 fold (build1 (REALPART_EXPR
,
8922 TREE_TYPE (inner_type
),
8924 fold (build1 (REALPART_EXPR
,
8925 TREE_TYPE (inner_type
),
8927 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
8928 fold (build1 (IMAGPART_EXPR
,
8929 TREE_TYPE (inner_type
),
8931 fold (build1 (IMAGPART_EXPR
,
8932 TREE_TYPE (inner_type
),
8934 if_false_label
, if_true_label
);
8937 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
8938 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
8940 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
8941 && !can_compare_p (TYPE_MODE (inner_type
), ccp_jump
))
8942 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
8944 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
8950 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8952 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
8953 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
8955 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
8956 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
8959 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
8960 fold (build (NE_EXPR
, TREE_TYPE (exp
),
8961 fold (build1 (REALPART_EXPR
,
8962 TREE_TYPE (inner_type
),
8964 fold (build1 (REALPART_EXPR
,
8965 TREE_TYPE (inner_type
),
8967 fold (build (NE_EXPR
, TREE_TYPE (exp
),
8968 fold (build1 (IMAGPART_EXPR
,
8969 TREE_TYPE (inner_type
),
8971 fold (build1 (IMAGPART_EXPR
,
8972 TREE_TYPE (inner_type
),
8974 if_false_label
, if_true_label
);
8977 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
8978 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
8980 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
8981 && !can_compare_p (TYPE_MODE (inner_type
), ccp_jump
))
8982 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
8984 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
8989 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
8990 if (GET_MODE_CLASS (mode
) == MODE_INT
8991 && ! can_compare_p (mode
, ccp_jump
))
8992 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
8994 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
8998 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
8999 if (GET_MODE_CLASS (mode
) == MODE_INT
9000 && ! can_compare_p (mode
, ccp_jump
))
9001 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9003 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9007 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9008 if (GET_MODE_CLASS (mode
) == MODE_INT
9009 && ! can_compare_p (mode
, ccp_jump
))
9010 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9012 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9016 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9017 if (GET_MODE_CLASS (mode
) == MODE_INT
9018 && ! can_compare_p (mode
, ccp_jump
))
9019 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9021 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9026 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
9028 /* This is not needed any more and causes poor code since it causes
9029 comparisons and tests from non-SI objects to have different code
9031 /* Copy to register to avoid generating bad insns by cse
9032 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9033 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9034 temp
= copy_to_reg (temp
);
9036 do_pending_stack_adjust ();
9037 /* Do any postincrements in the expression that was tested. */
9040 if (GET_CODE (temp
) == CONST_INT
|| GET_CODE (temp
) == LABEL_REF
)
9042 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
9046 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
9047 && ! can_compare_p (GET_MODE (temp
), ccp_jump
))
9048 /* Note swapping the labels gives us not-equal. */
9049 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
9050 else if (GET_MODE (temp
) != VOIDmode
)
9051 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
9052 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9053 GET_MODE (temp
), NULL_RTX
, 0,
9054 if_false_label
, if_true_label
);
9059 if (drop_through_label
)
9061 /* If do_jump produces code that might be jumped around,
9062 do any stack adjusts from that code, before the place
9063 where control merges in. */
9064 do_pending_stack_adjust ();
9065 emit_label (drop_through_label
);
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
				if_false_label, if_true_label);
}
9088 /* Compare OP0 with OP1, word at a time, in mode MODE.
9089 UNSIGNEDP says to do unsigned comparison.
9090 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9093 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
)
9094 enum machine_mode mode
;
9097 rtx if_false_label
, if_true_label
;
9099 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
9100 rtx drop_through_label
= 0;
9103 if (! if_true_label
|| ! if_false_label
)
9104 drop_through_label
= gen_label_rtx ();
9105 if (! if_true_label
)
9106 if_true_label
= drop_through_label
;
9107 if (! if_false_label
)
9108 if_false_label
= drop_through_label
;
9110 /* Compare a word at a time, high order first. */
9111 for (i
= 0; i
< nwords
; i
++)
9113 rtx op0_word
, op1_word
;
9115 if (WORDS_BIG_ENDIAN
)
9117 op0_word
= operand_subword_force (op0
, i
, mode
);
9118 op1_word
= operand_subword_force (op1
, i
, mode
);
9122 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
9123 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
9126 /* All but high-order word must be compared as unsigned. */
9127 do_compare_rtx_and_jump (op0_word
, op1_word
, GT
,
9128 (unsignedp
|| i
> 0), word_mode
, NULL_RTX
, 0,
9129 NULL_RTX
, if_true_label
);
9131 /* Consider lower words only if these are equal. */
9132 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
9133 NULL_RTX
, 0, NULL_RTX
, if_false_label
);
9137 emit_jump (if_false_label
);
9138 if (drop_through_label
)
9139 emit_label (drop_through_label
);
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, 0, if_false_label,
			     NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
9173 /* Jump according to whether OP0 is 0.
9174 We assume that OP0 has an integer mode that is too wide
9175 for the available compare insns. */
9178 do_jump_by_parts_equality_rtx (op0
, if_false_label
, if_true_label
)
9180 rtx if_false_label
, if_true_label
;
9182 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
9185 rtx drop_through_label
= 0;
  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
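  /* For illustration, the same strategy at the source level, with a
     hypothetical array of words:  */
#if 0
  {
    unsigned long acc = 0;
    int j;

    for (j = 0; j < nwords; j++)
      acc |= words[j];
    /* The multiword value is zero iff ACC is zero.  */
  }
#endif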
9192 part
= gen_reg_rtx (word_mode
);
9193 emit_move_insn (part
, operand_subword_force (op0
, 0, GET_MODE (op0
)));
9194 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
9195 part
= expand_binop (word_mode
, ior_optab
, part
,
9196 operand_subword_force (op0
, i
, GET_MODE (op0
)),
9197 part
, 1, OPTAB_WIDEN
);
9201 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
9202 NULL_RTX
, 0, if_false_label
, if_true_label
);
9207 /* If we couldn't do the "or" simply, do this with a series of compares. */
9208 if (! if_false_label
)
9209 drop_through_label
= if_false_label
= gen_label_rtx ();
9211 for (i
= 0; i
< nwords
; i
++)
9212 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, GET_MODE (op0
)),
9213 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
, 0,
9214 if_false_label
, NULL_RTX
);
9217 emit_jump (if_true_label
);
9219 if (drop_through_label
)
9220 emit_label (drop_through_label
);
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
9238 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
, align
)
9239 register rtx op0
, op1
;
9242 enum machine_mode mode
;
9248 /* If one operand is constant, make it the second one. Only do this
9249 if the other operand is not constant as well. */
9251 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
9252 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
9257 code
= swap_condition (code
);
9262 op0
= force_not_mem (op0
);
9263 op1
= force_not_mem (op1
);
9266 do_pending_stack_adjust ();
9268 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
9269 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
9273 /* There's no need to do this now that combine.c can eliminate lots of
9274 sign extensions. This can be less efficient in certain cases on other
9277 /* If this is a signed equality comparison, we can do it as an
9278 unsigned comparison since zero-extension is cheaper than sign
9279 extension and comparisons with zero are done as unsigned. This is
9280 the case even on machines that can do fast sign extension, since
9281 zero-extension is easier to combine with other operations than
9282 sign-extension is. If we are comparing against a constant, we must
9283 convert it to what it would look like unsigned. */
9284 if ((code
== EQ
|| code
== NE
) && ! unsignedp
9285 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
9287 if (GET_CODE (op1
) == CONST_INT
9288 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
9289 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
9294 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
, align
);
9296 return gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
9309 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
, size
, align
,
9310 if_false_label
, if_true_label
)
9311 register rtx op0
, op1
;
9314 enum machine_mode mode
;
9317 rtx if_false_label
, if_true_label
;
9320 int dummy_true_label
= 0;
9322 /* Reverse the comparison if that is safe and we want to jump if it is
9324 if (! if_true_label
&& ! FLOAT_MODE_P (mode
))
9326 if_true_label
= if_false_label
;
9328 code
= reverse_condition (code
);
9331 /* If one operand is constant, make it the second one. Only do this
9332 if the other operand is not constant as well. */
9334 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
9335 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
9340 code
= swap_condition (code
);
9345 op0
= force_not_mem (op0
);
9346 op1
= force_not_mem (op1
);
9349 do_pending_stack_adjust ();
9351 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
9352 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
9354 if (tem
== const_true_rtx
)
9357 emit_jump (if_true_label
);
9362 emit_jump (if_false_label
);
9368 /* There's no need to do this now that combine.c can eliminate lots of
9369 sign extensions. This can be less efficient in certain cases on other
9372 /* If this is a signed equality comparison, we can do it as an
9373 unsigned comparison since zero-extension is cheaper than sign
9374 extension and comparisons with zero are done as unsigned. This is
9375 the case even on machines that can do fast sign extension, since
9376 zero-extension is easier to combine with other operations than
9377 sign-extension is. If we are comparing against a constant, we must
9378 convert it to what it would look like unsigned. */
9379 if ((code
== EQ
|| code
== NE
) && ! unsignedp
9380 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
9382 if (GET_CODE (op1
) == CONST_INT
9383 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
9384 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
9389 if (! if_true_label
)
9391 dummy_true_label
= 1;
9392 if_true_label
= gen_label_rtx ();
9395 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
, align
,
9399 emit_jump (if_false_label
);
9400 if (dummy_true_label
)
9401 emit_label (if_true_label
);
9404 /* Generate code for a comparison expression EXP (including code to compute
9405 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9406 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9407 generated code will drop through.
9408 SIGNED_CODE should be the rtx operation for this comparison for
9409 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9411 We force a stack adjustment unless there are currently
9412 things pushed on the stack that aren't yet used. */
9415 do_compare_and_jump (exp
, signed_code
, unsigned_code
, if_false_label
,
9418 enum rtx_code signed_code
, unsigned_code
;
9419 rtx if_false_label
, if_true_label
;
9421 register rtx op0
, op1
;
9423 register enum machine_mode mode
;
9427 /* Don't crash if the comparison was erroneous. */
9428 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
9429 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
9432 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9433 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9434 mode
= TYPE_MODE (type
);
9435 unsignedp
= TREE_UNSIGNED (type
);
9436 code
= unsignedp
? unsigned_code
: signed_code
;
9438 #ifdef HAVE_canonicalize_funcptr_for_compare
9439 /* If function pointers need to be "canonicalized" before they can
9440 be reliably compared, then canonicalize them. */
9441 if (HAVE_canonicalize_funcptr_for_compare
9442 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9443 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9446 rtx new_op0
= gen_reg_rtx (mode
);
9448 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
9452 if (HAVE_canonicalize_funcptr_for_compare
9453 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9454 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9457 rtx new_op1
= gen_reg_rtx (mode
);
9459 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
9464 /* Do any postincrements in the expression that was tested. */
9467 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
,
9469 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
9470 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
,
9471 if_false_label
, if_true_label
);
9474 /* Generate code to calculate EXP using a store-flag instruction
9475 and return an rtx for the result. EXP is either a comparison
9476 or a TRUTH_NOT_EXPR whose operand is a comparison.
   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.
9486 Once expand_expr has been called on the arguments of the comparison,
9487 we are committed to doing the store flag, since it is not safe to
9488 re-evaluate the expression. We emit the store-flag insn by calling
9489 emit_store_flag, but only expand the arguments if we have a reason
9490 to believe that emit_store_flag will be successful. If we think that
9491 it will, but it isn't, we have to simulate the store-flag with a
9492 set/jump/set sequence. */
9495 do_store_flag (exp
, target
, mode
, only_cheap
)
9498 enum machine_mode mode
;
9502 tree arg0
, arg1
, type
;
9504 enum machine_mode operand_mode
;
9508 enum insn_code icode
;
9509 rtx subtarget
= target
;
9512 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9513 result at the end. We can't simply invert the test since it would
9514 have already been inverted if it were valid. This case occurs for
9515 some floating-point comparisons. */
9517 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
9518 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
9520 arg0
= TREE_OPERAND (exp
, 0);
9521 arg1
= TREE_OPERAND (exp
, 1);
9522 type
= TREE_TYPE (arg0
);
9523 operand_mode
= TYPE_MODE (type
);
9524 unsignedp
= TREE_UNSIGNED (type
);
9526 /* We won't bother with BLKmode store-flag operations because it would mean
9527 passing a lot of information to emit_store_flag. */
9528 if (operand_mode
== BLKmode
)
9531 /* We won't bother with store-flag operations involving function pointers
9532 when function pointers must be canonicalized before comparisons. */
9533 #ifdef HAVE_canonicalize_funcptr_for_compare
9534 if (HAVE_canonicalize_funcptr_for_compare
9535 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9536 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9538 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9539 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9540 == FUNCTION_TYPE
))))
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
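  /* For illustration, with a hypothetical signed X: "x < 1" becomes
     "x <= 0" and "x > -1" becomes "x >= 0", so the shortcuts below see
     a comparison against zero.  */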
9554 switch (TREE_CODE (exp
))
9563 if (integer_onep (arg1
))
9564 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
9566 code
= unsignedp
? LTU
: LT
;
9569 if (! unsignedp
&& integer_all_onesp (arg1
))
9570 arg1
= integer_zero_node
, code
= LT
;
9572 code
= unsignedp
? LEU
: LE
;
9575 if (! unsignedp
&& integer_all_onesp (arg1
))
9576 arg1
= integer_zero_node
, code
= GE
;
9578 code
= unsignedp
? GTU
: GT
;
9581 if (integer_onep (arg1
))
9582 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
9584 code
= unsignedp
? GEU
: GE
;
9590 /* Put a constant second. */
9591 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
9593 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
9594 code
= swap_condition (code
);
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
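  /* For illustration, with hypothetical operands: "(x & (1 << n)) != 0"
     is computed as "(x >> n) & 1", and "(x & (1 << n)) == 0" as
     "((x >> n) & 1) ^ 1".  */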
9603 if ((code
== NE
|| code
== EQ
)
9604 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
9605 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
9607 tree inner
= TREE_OPERAND (arg0
, 0);
9608 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
9611 /* If INNER is a right shift of a constant and it plus BITNUM does
9612 not overflow, adjust BITNUM and INNER. */
9614 if (TREE_CODE (inner
) == RSHIFT_EXPR
9615 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
9616 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
9617 && (bitnum
+ TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1))
9618 < TYPE_PRECISION (type
)))
9620 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
9621 inner
= TREE_OPERAND (inner
, 0);
9624 /* If we are going to be able to omit the AND below, we must do our
9625 operations as unsigned. If we must use the AND, we have a choice.
9626 Normally unsigned is faster, but for some machines signed is. */
9627 ops_unsignedp
= (bitnum
== TYPE_PRECISION (type
) - 1 ? 1
9628 #ifdef LOAD_EXTEND_OP
9629 : (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
? 0 : 1)
9635 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
9636 || GET_MODE (subtarget
) != operand_mode
9637 || ! safe_from_p (subtarget
, inner
, 1))
9640 op0
= expand_expr (inner
, subtarget
, VOIDmode
, 0);
9643 op0
= expand_shift (RSHIFT_EXPR
, GET_MODE (op0
), op0
,
9644 size_int (bitnum
), subtarget
, ops_unsignedp
);
9646 if (GET_MODE (op0
) != mode
)
9647 op0
= convert_to_mode (mode
, op0
, ops_unsignedp
);
9649 if ((code
== EQ
&& ! invert
) || (code
== NE
&& invert
))
9650 op0
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
, subtarget
,
9651 ops_unsignedp
, OPTAB_LIB_WIDEN
);
9653 /* Put the AND last so it can combine with more things. */
9654 if (bitnum
!= TYPE_PRECISION (type
) - 1)
9655 op0
= expand_and (op0
, const1_rtx
, subtarget
);
9660 /* Now see if we are likely to be able to do this. Return if not. */
9661 if (! can_compare_p (operand_mode
, ccp_store_flag
))
9663 icode
= setcc_gen_code
[(int) code
];
9664 if (icode
== CODE_FOR_nothing
9665 || (only_cheap
&& insn_data
[(int) icode
].operand
[0].mode
!= mode
))
9667 /* We can only do this if it is one of the special cases that
9668 can be handled without an scc insn. */
9669 if ((code
== LT
&& integer_zerop (arg1
))
9670 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
9672 else if (BRANCH_COST
>= 0
9673 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
9674 && TREE_CODE (type
) != REAL_TYPE
9675 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
9676 != CODE_FOR_nothing
)
9677 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
9678 != CODE_FOR_nothing
)))
9684 preexpand_calls (exp
);
9685 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
9686 || GET_MODE (subtarget
) != operand_mode
9687 || ! safe_from_p (subtarget
, arg1
, 1))
9690 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
9691 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
9694 target
= gen_reg_rtx (mode
);
9696 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9697 because, if the emit_store_flag does anything it will succeed and
9698 OP0 and OP1 will not be used subsequently. */
9700 result
= emit_store_flag (target
, code
,
9701 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
9702 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
9703 operand_mode
, unsignedp
, 1);
9708 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
9709 result
, 0, OPTAB_LIB_WIDEN
);
9713 /* If this failed, we have to do this with set/compare/jump/set code. */
9714 if (GET_CODE (target
) != REG
9715 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
9716 target
= gen_reg_rtx (GET_MODE (target
));
9718 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
9719 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
9720 operand_mode
, NULL_RTX
, 0);
9721 if (GET_CODE (result
) == CONST_INT
)
9722 return (((result
== const0_rtx
&& ! invert
)
9723 || (result
!= const0_rtx
&& invert
))
9724 ? const0_rtx
: const1_rtx
);
9726 label
= gen_label_rtx ();
9727 if (bcc_gen_fctn
[(int) code
] == 0)
9730 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
9731 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
9737 /* Generate a tablejump instruction (used for switch statements). */
9739 #ifdef HAVE_tablejump
9741 /* INDEX is the value being switched on, with the lowest value
9742 in the table already subtracted.
9743 MODE is its expected mode (needed if INDEX is constant).
9744 RANGE is the length of the jump table.
9745 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9747 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9748 index value is out of range. */
9751 do_tablejump (index
, mode
, range
, table_label
, default_label
)
9752 rtx index
, range
, table_label
, default_label
;
9753 enum machine_mode mode
;
9755 register rtx temp
, vector
;
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
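  /* For illustration, this is the classic switch range check, shown at
     the source level with hypothetical names:  */
#if 0
  if ((unsigned) (index_value - low) > (unsigned) (high - low))
    goto default_case;
#endif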
9765 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, mode
, 1,
9768 /* If index is in range, it must fit in Pmode.
9769 Convert to Pmode so we can index with it. */
9771 index
= convert_to_mode (Pmode
, index
, 1);
9773 /* Don't let a MEM slip thru, because then INDEX that comes
9774 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9775 and break_out_memory_refs will go to work on it and mess it up. */
9776 #ifdef PIC_CASE_VECTOR_ADDRESS
9777 if (flag_pic
&& GET_CODE (index
) != REG
)
9778 index
= copy_to_mode_reg (Pmode
, index
);
9781 /* If flag_force_addr were to affect this address
9782 it could interfere with the tricky assumptions made
9783 about addresses that contain label-refs,
9784 which may be valid only very near the tablejump itself. */
9785 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9786 GET_MODE_SIZE, because this indicates how large insns are. The other
9787 uses should all be Pmode, because they are addresses. This code
9788 could fail if addresses and insns are not the same size. */
9789 index
= gen_rtx_PLUS (Pmode
,
9790 gen_rtx_MULT (Pmode
, index
,
9791 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE
))),
9792 gen_rtx_LABEL_REF (Pmode
, table_label
));
9793 #ifdef PIC_CASE_VECTOR_ADDRESS
9795 index
= PIC_CASE_VECTOR_ADDRESS (index
);
9798 index
= memory_address_noforce (CASE_VECTOR_MODE
, index
);
9799 temp
= gen_reg_rtx (CASE_VECTOR_MODE
);
9800 vector
= gen_rtx_MEM (CASE_VECTOR_MODE
, index
);
9801 RTX_UNCHANGING_P (vector
) = 1;
9802 convert_move (temp
, vector
, 0);
9804 emit_jump_insn (gen_tablejump (temp
, table_label
));
9806 /* If we are generating PIC code or if the table is PC-relative, the
9807 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9808 if (! CASE_VECTOR_PC_RELATIVE
&& ! flag_pic
)
9812 #endif /* HAVE_tablejump */