/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn	PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
	PARAMS ((unsigned HOST_WIDE_INT,
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
	struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
	struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
	HOST_WIDE_INT, enum machine_mode,
	tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
	HOST_WIDE_INT, enum machine_mode,
	tree, enum machine_mode, int, tree,
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)

/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
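
/* Illustrative note (added comment, not from the original sources): with
   MOVE_RATIO defined above as 15 when not optimizing for size, a 32-byte
   word-aligned copy on a hypothetical 32-bit target needs 32/4 = 8 word
   moves, so MOVE_BY_PIECES_P holds and the copy is expanded inline; a
   128-byte copy would need 32 moves and falls back to a movstr pattern
   or a library call instead.  */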
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */
      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
            if (! HARD_REGNO_MODE_OK (regno, mode))
            reg = gen_rtx_REG (mode, regno);

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
/* This is run at the start of compiling a function.  */

  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;

  struct expr_status *p;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function ()
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (var, body)

  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
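
/* Illustration (added comment, not from the original sources): a caller
   expanding a binary operation would typically do, e.g.,

       op0 = protect_from_queue (op0, 0);
       op1 = protect_from_queue (op1, 0);
       emit_insn (gen_rtx_SET (VOIDmode, target, gen_rtx_PLUS (mode, op0, op1)));

   filtering each operand immediately before it is placed in an insn and
   never caching the filtered value across an emit_queue call.  The names
   op0/op1/target/mode here are hypothetical.  */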
protect_from_queue (x, modify)

  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)

          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */

          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))

      else if (code == PLUS || code == MULT)

          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  enum rtx_code code = GET_CODE (x);

      return queued_subexp_p (XEXP (x, 0));

      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));

/* Perform all the pending incrementations.  */

  while ((p = pending_chain))

      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)

          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));

        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
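
/* Illustration (added comment, not from the original sources): given a
   QImode pseudo char_reg and an SImode pseudo int_reg, a hypothetical
   caller widens with

       convert_move (int_reg, char_reg, 1);

   which zero-extends because UNSIGNEDP is nonzero; passing 0 would
   sign-extend instead.  */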
convert_move (to, from, unsignedp)

  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))

      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))

      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (to_real != from_real)

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))

          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))

              emit_unop_insn (code, to, from, UNKNOWN);
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          libcall = extendsfdf2_libfunc;
          libcall = extendsfxf2_libfunc;
          libcall = extendsftf2_libfunc;
          libcall = truncdfsf2_libfunc;
          libcall = extenddfxf2_libfunc;
          libcall = extenddftf2_libfunc;
          libcall = truncxfsf2_libfunc;
          libcall = truncxfdf2_libfunc;
          libcall = trunctfsf2_libfunc;
          libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
      insns = get_insns ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)

      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))

          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))

          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
        fill_value = const0_rtx;

          && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
          && STORE_FLAG_VALUE == -1)

          emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
          fill_value = gen_reg_rtx (word_mode);
          emit_insn (gen_slt (fill_value));

            = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                            size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
          fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)

          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)

      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  if (to_mode == PQImode)

      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
#endif /* HAVE_truncqipqi2 */

  if (from_mode == PQImode)

      if (to_mode != QImode)

          from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
#endif /* HAVE_extendpqiqi2 */

  if (to_mode == PSImode)

      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)

      if (to_mode != SImode)

          from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_zero_extendpsisi2 */

  if (to_mode == PDImode)

      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)

      if (to_mode != DImode)

          from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
#endif /* HAVE_extendpdidi2 */
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))

      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))

      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)

          emit_unop_insn (code, to, from, equiv_code);

          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))

                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
          emit_move_insn (to, tmp);
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)

#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)

#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)

#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)

#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)

#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)

#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)

#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)

#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)

#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)

#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))

      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
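
/* Illustration (added comment, not from the original sources): a caller
   that needs an SImode copy of an arbitrary-mode rtx X might write, e.g.,

       rtx wide = convert_to_mode (SImode, x, TREE_UNSIGNED (type));

   the result is either X itself reinterpreted in SImode or a fresh pseudo
   holding the converted value; "type" here is a hypothetical tree node
   supplying the signedness.  */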
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;

  return convert_modes (mode, VOIDmode, x, unsignedp);
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)

      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))

          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))

      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))

          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (trunc_int_for_mode (val, mode));

      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
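
/* Illustration (added comment, not from the original sources): for a
   16-byte word-aligned copy on a hypothetical 32-bit target, the loop
   below emits four SImode moves; a 14-byte copy would emit three SImode
   moves, one HImode move, and no QImode move, always walking from the
   widest usable integer mode down to the narrowest.  */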
move_by_pieces (to, from, len, align)
     unsigned HOST_WIDE_INT len;

  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.from_addr = from_addr;

      to_addr = XEXP (to, 0);
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);

#ifdef STACK_GROWS_DOWNWARD

  data.to_addr = to_addr;
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)

      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)

          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;

      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)

          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;

      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)

          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.explicit_inc_to = -1;

      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)

          data.to_addr = copy_addr_to_reg (to_addr);
          data.explicit_inc_to = 1;

      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;

  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)

      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;

  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)

        data->offset -= size;

          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
            to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

        emit_insn ((*genfun) (to1, from1));

#ifdef PUSH_ROUNDING
        emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)

   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

emit_block_move (x, y, size)

#ifdef TARGET_MEM_FUNCTIONS
  tree call_expr, arg_list;

  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)

  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)

  if (GET_CODE (y) != MEM)

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))

          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))

              rtx last = get_last_insn ();

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);

              delete_insns_since (last);

      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to
         emit_queue.

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          ggc_add_tree_root (&fn, 1);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          TREE_NOTHROW (fn) = 1;
          make_decl_rtl (fn, NULL);
          assemble_external (fn);

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be hoisted
     from a loop.  */
  if (RTX_UNCHANGING_P (x))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;

#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)

      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,

      delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

move_block_from_reg (regno, x, nregs, size)

#ifdef HAVE_store_multiple
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)

      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)

      rtx tem = operand_subword (x, 0, 1, BLKmode);

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)

      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),

      delete_insns_since (last);

  for (i = 0; i < nregs; i++)

      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING

emit_group_load (dst, orig_src, ssize)

  if (GET_CODE (dst) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)

      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)

          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */

      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))

          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))

          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));

      else if (GET_CODE (src) == CONCAT)

              && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
            tmps[i] = XEXP (src, 0);
          else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
                   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
            tmps[i] = XEXP (src, 1);
      else if (bytepos == 0)

          rtx mem = assign_stack_temp (GET_MODE (src),
                                       GET_MODE_SIZE (GET_MODE (src)), 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, 0);

      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))

        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

emit_group_store (orig_dst, src, ssize)

  if (GET_CODE (src) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)

      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */

  if (GET_CODE (dst) == PARALLEL)

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);

  else if (GET_CODE (dst) != MEM)

      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)

      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)

          if (BYTES_BIG_ENDIAN)

              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);

          bytelen = ssize - bytepos;

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
          && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);

        store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

copy_blkmode_from_reg (tgtblk, srcreg, type)

  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

      tgtblk = assign_temp (build_qualified_type (type,
                                                  | TYPE_QUAL_CONST)),
      preserve_temp_slots (tgtblk);

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)

      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
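/* Determine whether the LEN bytes generated by CONSTFUN can be stored
   to memory using several move instructions.  CONSTFUNDATA is a pointer
   which will be passed as argument in every CONSTFUN call; ALIGN is the
   maximum alignment we can assume.  Return nonzero if a call to
   store_by_pieces should succeed.  (Summary added here; see the body
   below for the exact checks performed.)  */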
int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */
  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = MOVE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}
    }

  /* The code above should have handled everything.  */
  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();

  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
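/* Added illustration: clearing a word-aligned 7-byte block this way on a
   32-bit target typically emits one SImode, one HImode and one QImode
   store of zero, instead of the memset/bzero library call that
   clear_storage uses for larger or unaligned blocks (the exact pieces
   depend on MOVE_MAX_PIECES and the move patterns available).  */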
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */
  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
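/* Added illustration: storing 6 bytes through successive calls from
   store_by_pieces_1 (one call per mode) typically emits a 4-byte store
   at offset 0 followed by a 2-byte store at offset 4; with data->reverse
   set, the 4-byte store lands at offset 2 and the 2-byte store at
   offset 0, matching a decrementing address.  */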
2537 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2538 its length in bytes. */
2541 clear_storage (object
, size
)
2545 #ifdef TARGET_MEM_FUNCTIONS
2547 tree call_expr
, arg_list
;
2550 unsigned int align
= (GET_CODE (object
) == MEM
? MEM_ALIGN (object
)
2551 : GET_MODE_ALIGNMENT (GET_MODE (object
)));
2553 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2554 just move a zero. Otherwise, do this a piece at a time. */
2555 if (GET_MODE (object
) != BLKmode
2556 && GET_CODE (size
) == CONST_INT
2557 && GET_MODE_SIZE (GET_MODE (object
)) == (unsigned int) INTVAL (size
))
2558 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
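    /* Added note: clearing an SImode object whose SIZE rtx is
       (const_int 4) takes this path and collapses to a single move of
       CONST0_RTX (SImode); larger or BLKmode objects are handled by the
       piecewise or library-call code below.  */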
2561 object
= protect_from_queue (object
, 1);
2562 size
= protect_from_queue (size
, 0);
2564 if (GET_CODE (size
) == CONST_INT
2565 && MOVE_BY_PIECES_P (INTVAL (size
), align
))
2566 clear_by_pieces (object
, INTVAL (size
), align
);
2569 /* Try the most limited insn first, because there's no point
2570 including more than one in the machine description unless
2571 the more limited one has some advantage. */
2573 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2574 enum machine_mode mode
;
2576 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2577 mode
= GET_MODE_WIDER_MODE (mode
))
2579 enum insn_code code
= clrstr_optab
[(int) mode
];
2580 insn_operand_predicate_fn pred
;
2582 if (code
!= CODE_FOR_nothing
2583 /* We don't need MODE to be narrower than
2584 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2585 the mode mask, as it is returned by the macro, it will
2586 definitely be less than the actual mode mask. */
2587 && ((GET_CODE (size
) == CONST_INT
2588 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2589 <= (GET_MODE_MASK (mode
) >> 1)))
2590 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2591 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2592 || (*pred
) (object
, BLKmode
))
2593 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2594 || (*pred
) (opalign
, VOIDmode
)))
2597 rtx last
= get_last_insn ();
2600 op1
= convert_to_mode (mode
, size
, 1);
2601 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2602 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2603 op1
= copy_to_mode_reg (mode
, op1
);
2605 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2612 delete_insns_since (last
);
2616 /* OBJECT or SIZE may have been passed through protect_from_queue.
2618 It is unsafe to save the value generated by protect_from_queue
2619 and reuse it later. Consider what happens if emit_queue is
2620 called before the return value from protect_from_queue is used.
2622 Expansion of the CALL_EXPR below will call emit_queue before
2623 we are finished emitting RTL for argument setup. So if we are
2624 not careful we could get the wrong value for an argument.
2626 To avoid this problem we go ahead and emit code to copy OBJECT
2627 and SIZE into new pseudos. We can then place those new pseudos
2628 into an RTL_EXPR and use them later, even after a call to
2631 Note this is not strictly needed for library calls since they
2632 do not call emit_queue before loading their arguments. However,
2633 we may need to have library calls call emit_queue in the future
2634 since failing to do so could cause problems for targets which
2635 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2636 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2638 #ifdef TARGET_MEM_FUNCTIONS
2639 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
2641 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
2642 TREE_UNSIGNED (integer_type_node
));
2643 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
2646 #ifdef TARGET_MEM_FUNCTIONS
2647 /* It is incorrect to use the libcall calling conventions to call
2648 memset in this context.
2650 This could be a user call to memset and the user may wish to
2651 examine the return value from memset.
     For targets where libcalls and normal calls have different
     conventions for returning pointers, we could end up generating
     incorrect code.

     So instead of using a libcall sequence we build up a suitable
     CALL_EXPR and expand the call in the normal fashion.  */
2659 if (fn
== NULL_TREE
)
2663 /* This was copied from except.c, I don't know if all this is
2664 necessary in this context or not. */
2665 fn
= get_identifier ("memset");
2666 fntype
= build_pointer_type (void_type_node
);
2667 fntype
= build_function_type (fntype
, NULL_TREE
);
2668 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
2669 ggc_add_tree_root (&fn
, 1);
2670 DECL_EXTERNAL (fn
) = 1;
2671 TREE_PUBLIC (fn
) = 1;
2672 DECL_ARTIFICIAL (fn
) = 1;
2673 TREE_NOTHROW (fn
) = 1;
2674 make_decl_rtl (fn
, NULL
);
2675 assemble_external (fn
);
      /* We need to make an argument list for the function call.

	 memset has three arguments, the first is a void * address, the
	 second an integer with the initialization value, the last is a
	 size_t byte count for the copy.  */
2684 = build_tree_list (NULL_TREE
,
2685 make_tree (build_pointer_type (void_type_node
),
2687 TREE_CHAIN (arg_list
)
2688 = build_tree_list (NULL_TREE
,
2689 make_tree (integer_type_node
, const0_rtx
));
2690 TREE_CHAIN (TREE_CHAIN (arg_list
))
2691 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
2692 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
2694 /* Now we have to build up the CALL_EXPR itself. */
2695 call_expr
= build1 (ADDR_EXPR
,
2696 build_pointer_type (TREE_TYPE (fn
)), fn
);
2697 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2698 call_expr
, arg_list
, NULL_TREE
);
2699 TREE_SIDE_EFFECTS (call_expr
) = 1;
2701 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2703 emit_library_call (bzero_libfunc
, LCT_NORMAL
,
2704 VOIDmode
, 2, object
, Pmode
, size
,
2705 TYPE_MODE (integer_type_node
));
2708 /* If we are initializing a readonly value, show the above call
2709 clobbered it. Otherwise, a load from it may erroneously be
2710 hoisted from a loop. */
2711 if (RTX_UNCHANGING_P (object
))
2712 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    {
      y_cst = y;
      y = force_const_mem (mode, y);
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
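/* Added note on the code above: when Y is a constant rejected by
   LEGITIMATE_CONSTANT_P, it is spilled to the constant pool with
   force_const_mem while Y_CST remembers the original value, so the
   REG_EQUAL note still records the constant for later passes.  */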
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
2780 emit_move_insn_1 (x
, y
)
2783 enum machine_mode mode
= GET_MODE (x
);
2784 enum machine_mode submode
;
2785 enum mode_class
class = GET_MODE_CLASS (mode
);
2788 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2791 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2793 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2795 /* Expand complex moves by moving real part and imag part, if possible. */
2796 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2797 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2799 (class == MODE_COMPLEX_INT
2800 ? MODE_INT
: MODE_FLOAT
),
2802 && (mov_optab
->handlers
[(int) submode
].insn_code
2803 != CODE_FOR_nothing
))
2805 /* Don't split destination if it is a stack push. */
2806 int stack
= push_operand (x
, GET_MODE (x
));
2808 #ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than what
	 the machine can push exactly, we need to use move instructions.  */
2812 && PUSH_ROUNDING (GET_MODE_SIZE (submode
)) != GET_MODE_SIZE (submode
))
2815 int offset1
, offset2
;
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp
= expand_binop (Pmode
,
2820 #ifdef STACK_GROWS_DOWNWARD
2827 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))),
2831 if (temp
!= stack_pointer_rtx
)
2832 emit_move_insn (stack_pointer_rtx
, temp
);
2833 #ifdef STACK_GROWS_DOWNWARD
2835 offset2
= GET_MODE_SIZE (submode
);
2837 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2838 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2839 + GET_MODE_SIZE (submode
));
2841 emit_move_insn (change_address (x
, submode
,
2842 gen_rtx_PLUS (Pmode
,
2844 GEN_INT (offset1
))),
2845 gen_realpart (submode
, y
));
2846 emit_move_insn (change_address (x
, submode
,
2847 gen_rtx_PLUS (Pmode
,
2849 GEN_INT (offset2
))),
2850 gen_imagpart (submode
, y
));
2854 /* If this is a stack, push the highpart first, so it
2855 will be in the argument order.
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of machine's endianness. */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2865 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2866 gen_imagpart (submode
, y
)));
2867 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2868 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2869 gen_realpart (submode
, y
)));
2871 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2872 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2873 gen_realpart (submode
, y
)));
2874 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2875 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2876 gen_imagpart (submode
, y
)));
2881 rtx realpart_x
, realpart_y
;
2882 rtx imagpart_x
, imagpart_y
;
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2894 && (reload_in_progress
| reload_completed
) == 0)
2896 int packed_dest_p
= (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2897 int packed_src_p
= (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2899 if (packed_dest_p
|| packed_src_p
)
2901 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2902 ? MODE_FLOAT
: MODE_INT
);
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2907 if (reg_mode
!= BLKmode
)
2909 rtx mem
= assign_stack_temp (reg_mode
,
2910 GET_MODE_SIZE (mode
), 0);
2911 rtx cmem
= adjust_address (mem
, mode
, 0);
2914 = N_("function using short complex types cannot be inline");
2918 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2919 emit_move_insn_1 (cmem
, y
);
2920 return emit_move_insn_1 (sreg
, mem
);
2924 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2925 emit_move_insn_1 (mem
, sreg
);
2926 return emit_move_insn_1 (x
, cmem
);
2932 realpart_x
= gen_realpart (submode
, x
);
2933 realpart_y
= gen_realpart (submode
, y
);
2934 imagpart_x
= gen_imagpart (submode
, x
);
2935 imagpart_y
= gen_imagpart (submode
, y
);
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
2942 && ! (reload_in_progress
|| reload_completed
)
2943 && (GET_CODE (realpart_x
) == SUBREG
2944 || GET_CODE (imagpart_x
) == SUBREG
))
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2949 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2950 (realpart_x
, realpart_y
));
2951 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2952 (imagpart_x
, imagpart_y
));
2955 return get_last_insn ();
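  /* Added example for the complex-mode branch above: a DCmode
     (complex double) move on a target that only provides DFmode move
     patterns is expanded into separate moves of the real and imaginary
     parts obtained with gen_realpart and gen_imagpart.  */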
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
2961 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2967 #ifdef PUSH_ROUNDING
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x
, GET_MODE (x
)))
2976 /* Do not use anti_adjust_stack, since we don't want to update
2977 stack_pointer_delta. */
2978 temp
= expand_binop (Pmode
,
2979 #ifdef STACK_GROWS_DOWNWARD
2986 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))),
2990 if (temp
!= stack_pointer_rtx
)
2991 emit_move_insn (stack_pointer_rtx
, temp
);
2993 code
= GET_CODE (XEXP (x
, 0));
2994 /* Just hope that small offsets off SP are OK. */
2995 if (code
== POST_INC
)
2996 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
2997 GEN_INT (-(HOST_WIDE_INT
)
2998 GET_MODE_SIZE (GET_MODE (x
))));
2999 else if (code
== POST_DEC
)
3000 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3001 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3003 temp
= stack_pointer_rtx
;
3005 x
= change_address (x
, VOIDmode
, temp
);
3009 /* If we are in reload, see if either operand is a MEM whose address
3010 is scheduled for replacement. */
3011 if (reload_in_progress
&& GET_CODE (x
) == MEM
3012 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3013 x
= replace_equiv_address_nv (x
, inner
);
3014 if (reload_in_progress
&& GET_CODE (y
) == MEM
3015 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3016 y
= replace_equiv_address_nv (y
, inner
);
3022 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3025 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3026 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3028 /* If we can't get a part of Y, put Y into memory if it is a
3029 constant. Otherwise, force it into a register. If we still
3030 can't get a part of Y, abort. */
3031 if (ypart
== 0 && CONSTANT_P (y
))
3033 y
= force_const_mem (mode
, y
);
3034 ypart
= operand_subword (y
, i
, 1, mode
);
3036 else if (ypart
== 0)
3037 ypart
= operand_subword_force (y
, i
, mode
);
3039 if (xpart
== 0 || ypart
== 0)
3042 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3044 last_insn
= emit_move_insn (xpart
, ypart
);
3047 seq
= gen_sequence ();
3050 /* Show the output dies here. This is necessary for SUBREGs
3051 of pseudos since we cannot track their lifetimes correctly;
3052 hard regs shouldn't appear here except as return values.
3053 We never want to emit such a clobber after reload. */
3055 && ! (reload_in_progress
|| reload_completed
)
3056 && need_clobber
!= 0)
3058 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
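/* Summary of the adjustment above (added note): a constant SIZE is folded
   together with EXTRA via plus_constant; a plain REG with no EXTRA is
   handed to anti_adjust_stack directly; anything else is copied into a
   Pmode register and EXTRA is added with expand_binop before the stack
   is adjusted.  */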
/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }

  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  set_mem_attributes (dest, type, 1);

  if (flag_optimize_sibling_calls)
    /* Function incoming arguments may overlap with sibling call
       outgoing arguments and we cannot allow reordering of reads
       from function arguments with stores to outgoing arguments
       of sibling calls.  */
    set_mem_alias_set (dest, 0);

  emit_move_insn (dest, x);
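/* Added example: on a target whose PUSH_ROUNDING rounds a 1-byte push up
   to 4 bytes, rounded_size (4) differs from GET_MODE_SIZE (QImode) (1),
   so the fall-back path above builds a PRE_MODIFY stack address instead
   of a simple STACK_PUSH_CODE auto-modification.  */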
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.
3213 ALIGN (in bits) is maximum alignment we can assume.
3215 If PARTIAL and REG are both nonzero, then copy that many of the first
3216 words of X into registers starting with REG, and push the rest of X.
3217 The amount of space pushed is decreased by PARTIAL words,
3218 rounded *down* to a multiple of PARM_BOUNDARY.
3219 REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.
3224 EXTRA is the amount in bytes of extra space to leave next to this arg.
3225 This is ignored if an argument block has already been allocated.
3227 On a machine that lacks real push insns, ARGS_ADDR is the address of
3228 the bottom of the argument block for this call. We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.
3232 ARGS_SO_FAR is the size of args previously pushed for this call.
3234 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3235 for arguments passed in registers. If nonzero, it will be the number
3236 of bytes required. */
3239 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
3240 args_addr
, args_so_far
, reg_parm_stack_space
,
3243 enum machine_mode mode
;
3252 int reg_parm_stack_space
;
3256 enum direction stack_direction
3257 #ifdef STACK_GROWS_DOWNWARD
3263 /* Decide where to pad the argument: `downward' for below,
3264 `upward' for above, or `none' for don't pad it.
3265 Default is below for small data on big-endian machines; else above. */
3266 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3268 /* Invert direction if stack is post-decrement.
3270 if (STACK_PUSH_CODE
== POST_DEC
)
3271 if (where_pad
!= none
)
3272 where_pad
= (where_pad
== downward
? upward
: downward
);
3274 xinner
= x
= protect_from_queue (x
, 0);
3276 if (mode
== BLKmode
)
3278 /* Copy a block into the stack, entirely or partially. */
3281 int used
= partial
* UNITS_PER_WORD
;
3282 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3290 /* USED is now the # of bytes we need not copy to the stack
3291 because registers will take care of them. */
3294 xinner
= adjust_address (xinner
, BLKmode
, used
);
3296 /* If the partial register-part of the arg counts in its stack size,
3297 skip the part of stack space corresponding to the registers.
3298 Otherwise, start copying to the beginning of the stack space,
3299 by setting SKIP to 0. */
3300 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3302 #ifdef PUSH_ROUNDING
3303 /* Do it with several push insns if that doesn't take lots of insns
3304 and if there is no difficulty with push insns that skip bytes
3305 on the stack for alignment purposes. */
3308 && GET_CODE (size
) == CONST_INT
3310 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3311 /* Here we avoid the case of a structure whose weak alignment
3312 forces many pushes of a small amount of data,
3313 and such small pushes do rounding that causes trouble. */
3314 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3315 || align
>= BIGGEST_ALIGNMENT
3316 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3317 == (align
/ BITS_PER_UNIT
)))
3318 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3320 /* Push padding now if padding above and stack grows down,
3321 or if padding below and stack grows up.
3322 But if space already allocated, this has already been done. */
3323 if (extra
&& args_addr
== 0
3324 && where_pad
!= none
&& where_pad
!= stack_direction
)
3325 anti_adjust_stack (GEN_INT (extra
));
3327 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
);
3329 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3333 in_check_memory_usage
= 1;
3334 temp
= get_push_address (INTVAL (size
) - used
);
3335 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3336 emit_library_call (chkr_copy_bitmap_libfunc
,
3337 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, temp
,
3338 Pmode
, XEXP (xinner
, 0), Pmode
,
3339 GEN_INT (INTVAL (size
) - used
),
3340 TYPE_MODE (sizetype
));
3342 emit_library_call (chkr_set_right_libfunc
,
3343 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, temp
,
3344 Pmode
, GEN_INT (INTVAL (size
) - used
),
3345 TYPE_MODE (sizetype
),
3346 GEN_INT (MEMORY_USE_RW
),
3347 TYPE_MODE (integer_type_node
));
3348 in_check_memory_usage
= 0;
3352 #endif /* PUSH_ROUNDING */
3356 /* Otherwise make space on the stack and copy the data
3357 to the address of that space. */
3359 /* Deduct words put into registers from the size we must copy. */
3362 if (GET_CODE (size
) == CONST_INT
)
3363 size
= GEN_INT (INTVAL (size
) - used
);
3365 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3366 GEN_INT (used
), NULL_RTX
, 0,
3370 /* Get the address of the stack space.
3371 In this case, we do not deal with EXTRA separately.
3372 A single stack adjust will do. */
3375 temp
= push_block (size
, extra
, where_pad
== downward
);
3378 else if (GET_CODE (args_so_far
) == CONST_INT
)
3379 temp
= memory_address (BLKmode
,
3380 plus_constant (args_addr
,
3381 skip
+ INTVAL (args_so_far
)));
3383 temp
= memory_address (BLKmode
,
3384 plus_constant (gen_rtx_PLUS (Pmode
,
3388 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3390 in_check_memory_usage
= 1;
3391 target
= copy_to_reg (temp
);
3392 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3393 emit_library_call (chkr_copy_bitmap_libfunc
,
3394 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
3396 XEXP (xinner
, 0), Pmode
,
3397 size
, TYPE_MODE (sizetype
));
3399 emit_library_call (chkr_set_right_libfunc
,
3400 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
3402 size
, TYPE_MODE (sizetype
),
3403 GEN_INT (MEMORY_USE_RW
),
3404 TYPE_MODE (integer_type_node
));
3405 in_check_memory_usage
= 0;
3408 target
= gen_rtx_MEM (BLKmode
, temp
);
3412 set_mem_attributes (target
, type
, 1);
3413 /* Function incoming arguments may overlap with sibling call
3414 outgoing arguments and we cannot allow reordering of reads
3415 from function arguments with stores to outgoing arguments
3416 of sibling calls. */
3417 set_mem_alias_set (target
, 0);
3420 set_mem_align (target
, align
);
3422 /* TEMP is the address of the block. Copy the data there. */
3423 if (GET_CODE (size
) == CONST_INT
3424 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size
), align
))
3426 move_by_pieces (target
, xinner
, INTVAL (size
), align
);
3431 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
3432 enum machine_mode mode
;
3434 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3436 mode
= GET_MODE_WIDER_MODE (mode
))
3438 enum insn_code code
= movstr_optab
[(int) mode
];
3439 insn_operand_predicate_fn pred
;
3441 if (code
!= CODE_FOR_nothing
3442 && ((GET_CODE (size
) == CONST_INT
3443 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3444 <= (GET_MODE_MASK (mode
) >> 1)))
3445 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
3446 && (!(pred
= insn_data
[(int) code
].operand
[0].predicate
)
3447 || ((*pred
) (target
, BLKmode
)))
3448 && (!(pred
= insn_data
[(int) code
].operand
[1].predicate
)
3449 || ((*pred
) (xinner
, BLKmode
)))
3450 && (!(pred
= insn_data
[(int) code
].operand
[3].predicate
)
3451 || ((*pred
) (opalign
, VOIDmode
))))
3453 rtx op2
= convert_to_mode (mode
, size
, 1);
3454 rtx last
= get_last_insn ();
3457 pred
= insn_data
[(int) code
].operand
[2].predicate
;
3458 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
3459 op2
= copy_to_mode_reg (mode
, op2
);
3461 pat
= GEN_FCN ((int) code
) (target
, xinner
,
3469 delete_insns_since (last
);
3474 if (!ACCUMULATE_OUTGOING_ARGS
)
3476 /* If the source is referenced relative to the stack pointer,
3477 copy it to another register to stabilize it. We do not need
3478 to do this if we know that we won't be changing sp. */
3480 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3481 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3482 temp
= copy_to_reg (temp
);
3485 /* Make inhibit_defer_pop nonzero around the library call
3486 to force it to pop the bcopy-arguments right away. */
3488 #ifdef TARGET_MEM_FUNCTIONS
3489 emit_library_call (memcpy_libfunc
, LCT_NORMAL
,
3490 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
3491 convert_to_mode (TYPE_MODE (sizetype
),
3492 size
, TREE_UNSIGNED (sizetype
)),
3493 TYPE_MODE (sizetype
));
3495 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3496 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
3497 convert_to_mode (TYPE_MODE (integer_type_node
),
3499 TREE_UNSIGNED (integer_type_node
)),
3500 TYPE_MODE (integer_type_node
));
3505 else if (partial
> 0)
3507 /* Scalar partly in registers. */
3509 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3512 /* # words of start of argument
3513 that we must make space for but need not store. */
3514 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3515 int args_offset
= INTVAL (args_so_far
);
3518 /* Push padding now if padding above and stack grows down,
3519 or if padding below and stack grows up.
3520 But if space already allocated, this has already been done. */
3521 if (extra
&& args_addr
== 0
3522 && where_pad
!= none
&& where_pad
!= stack_direction
)
3523 anti_adjust_stack (GEN_INT (extra
));
3525 /* If we make space by pushing it, we might as well push
3526 the real data. Otherwise, we can leave OFFSET nonzero
3527 and leave the space uninitialized. */
3531 /* Now NOT_STACK gets the number of words that we don't need to
3532 allocate on the stack. */
3533 not_stack
= partial
- offset
;
3535 /* If the partial register-part of the arg counts in its stack size,
3536 skip the part of stack space corresponding to the registers.
3537 Otherwise, start copying to the beginning of the stack space,
3538 by setting SKIP to 0. */
3539 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3541 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3542 x
= validize_mem (force_const_mem (mode
, x
));
3544 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3545 SUBREGs of such registers are not allowed. */
3546 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3547 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3548 x
= copy_to_reg (x
);
3550 /* Loop over all the words allocated on the stack for this arg. */
3551 /* We can do it by words, because any scalar bigger than a word
3552 has a size a multiple of a word. */
3553 #ifndef PUSH_ARGS_REVERSED
3554 for (i
= not_stack
; i
< size
; i
++)
3556 for (i
= size
- 1; i
>= not_stack
; i
--)
3558 if (i
>= not_stack
+ offset
)
3559 emit_push_insn (operand_subword_force (x
, i
, mode
),
3560 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3562 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3564 reg_parm_stack_space
, alignment_pad
);
3569 rtx target
= NULL_RTX
;
3572 /* Push padding now if padding above and stack grows down,
3573 or if padding below and stack grows up.
3574 But if space already allocated, this has already been done. */
3575 if (extra
&& args_addr
== 0
3576 && where_pad
!= none
&& where_pad
!= stack_direction
)
3577 anti_adjust_stack (GEN_INT (extra
));
3579 #ifdef PUSH_ROUNDING
3580 if (args_addr
== 0 && PUSH_ARGS
)
3581 emit_single_push_insn (mode
, x
, type
);
3585 if (GET_CODE (args_so_far
) == CONST_INT
)
3587 = memory_address (mode
,
3588 plus_constant (args_addr
,
3589 INTVAL (args_so_far
)));
3591 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3594 dest
= gen_rtx_MEM (mode
, addr
);
3597 set_mem_attributes (dest
, type
, 1);
3598 /* Function incoming arguments may overlap with sibling call
3599 outgoing arguments and we cannot allow reordering of reads
3600 from function arguments with stores to outgoing arguments
3601 of sibling calls. */
3602 set_mem_alias_set (dest
, 0);
3605 emit_move_insn (dest
, x
);
3608 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3610 in_check_memory_usage
= 1;
3612 target
= get_push_address (GET_MODE_SIZE (mode
));
3614 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3615 emit_library_call (chkr_copy_bitmap_libfunc
,
3616 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, target
,
3617 Pmode
, XEXP (x
, 0), Pmode
,
3618 GEN_INT (GET_MODE_SIZE (mode
)),
3619 TYPE_MODE (sizetype
));
3621 emit_library_call (chkr_set_right_libfunc
,
3622 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, target
,
3623 Pmode
, GEN_INT (GET_MODE_SIZE (mode
)),
3624 TYPE_MODE (sizetype
),
3625 GEN_INT (MEMORY_USE_RW
),
3626 TYPE_MODE (integer_type_node
));
3627 in_check_memory_usage
= 0;
3632 /* If part should go in registers, copy that part
3633 into the appropriate registers. Do this now, at the end,
3634 since mem-to-mem copies above may do function calls. */
3635 if (partial
> 0 && reg
!= 0)
3637 /* Handle calls that pass values in multiple non-contiguous locations.
3638 The Irix 6 ABI has examples of this. */
3639 if (GET_CODE (reg
) == PARALLEL
)
3640 emit_group_load (reg
, x
, -1); /* ??? size? */
3642 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3645 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3646 anti_adjust_stack (GEN_INT (extra
));
3648 if (alignment_pad
&& args_addr
== 0)
3649 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || GET_CODE (x) != REG
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}
3672 /* Expand an assignment that stores the value of FROM into TO.
3673 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3674 (This may contain a QUEUED rtx;
3675 if the value is constant, this rtx is a constant.)
3676 Otherwise, the returned value is NULL_RTX.
3678 SUGGEST_REG is no longer actually used.
3679 It used to mean, copy the value through a register
3680 and return that register, if that is possible.
3681 We now use WANT_VALUE to decide whether to do this. */
3684 expand_assignment (to
, from
, want_value
, suggest_reg
)
3687 int suggest_reg ATTRIBUTE_UNUSED
;
3692 /* Don't crash if the lhs of the assignment was erroneous. */
3694 if (TREE_CODE (to
) == ERROR_MARK
)
3696 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3697 return want_value
? result
: NULL_RTX
;
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
3706 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3707 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
)
3709 enum machine_mode mode1
;
3710 HOST_WIDE_INT bitsize
, bitpos
;
3718 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3719 &unsignedp
, &volatilep
);
3721 /* If we are going to use store_bit_field and extract_bit_field,
3722 make sure to_rtx will be safe for multiple use. */
3724 if (mode1
== VOIDmode
&& want_value
)
3725 tem
= stabilize_reference (tem
);
3727 orig_to_rtx
= to_rtx
3728 = expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_DONT
);
3731 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3733 if (GET_CODE (to_rtx
) != MEM
)
3736 if (GET_MODE (offset_rtx
) != ptr_mode
)
3737 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3739 #ifdef POINTERS_EXTEND_UNSIGNED
3740 if (GET_MODE (offset_rtx
) != Pmode
)
3741 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
3744 /* A constant address in TO_RTX can have VOIDmode, we must not try
3745 to call force_reg for that case. Avoid that case. */
3746 if (GET_CODE (to_rtx
) == MEM
3747 && GET_MODE (to_rtx
) == BLKmode
3748 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3750 && (bitpos
% bitsize
) == 0
3751 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3752 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3755 = adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3757 if (GET_CODE (XEXP (temp
, 0)) == REG
)
3760 to_rtx
= (replace_equiv_address
3761 (to_rtx
, force_reg (GET_MODE (XEXP (temp
, 0)),
3766 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3767 highest_pow2_factor (offset
));
3770 if (GET_CODE (to_rtx
) == MEM
)
3772 tree old_expr
= MEM_EXPR (to_rtx
);
3774 /* If the field is at offset zero, we could have been given the
3775 DECL_RTX of the parent struct. Don't munge it. */
3776 to_rtx
= shallow_copy_rtx (to_rtx
);
3778 set_mem_attributes (to_rtx
, to
, 0);
3780 /* If we changed MEM_EXPR, that means we're now referencing
3781 the COMPONENT_REF, which means that MEM_OFFSET must be
3782 relative to that field. But we've not yet reflected BITPOS
3783 in TO_RTX. This will be done in store_field. Adjust for
3784 that by biasing MEM_OFFSET by -bitpos. */
3785 if (MEM_EXPR (to_rtx
) != old_expr
&& MEM_OFFSET (to_rtx
)
3786 && (bitpos
/ BITS_PER_UNIT
) != 0)
3787 set_mem_offset (to_rtx
, GEN_INT (INTVAL (MEM_OFFSET (to_rtx
))
3788 - (bitpos
/ BITS_PER_UNIT
)));
3791 /* Deal with volatile and readonly fields. The former is only done
3792 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3793 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
3795 if (to_rtx
== orig_to_rtx
)
3796 to_rtx
= copy_rtx (to_rtx
);
3797 MEM_VOLATILE_P (to_rtx
) = 1;
3800 if (TREE_CODE (to
) == COMPONENT_REF
3801 && TREE_READONLY (TREE_OPERAND (to
, 1)))
3803 if (to_rtx
== orig_to_rtx
)
3804 to_rtx
= copy_rtx (to_rtx
);
3805 RTX_UNCHANGING_P (to_rtx
) = 1;
3808 if (! can_address_p (to
))
3810 if (to_rtx
== orig_to_rtx
)
3811 to_rtx
= copy_rtx (to_rtx
);
3812 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3815 /* Check the access. */
3816 if (current_function_check_memory_usage
&& GET_CODE (to_rtx
) == MEM
)
3821 enum machine_mode best_mode
;
3823 best_mode
= get_best_mode (bitsize
, bitpos
,
3824 TYPE_ALIGN (TREE_TYPE (tem
)),
3826 if (best_mode
== VOIDmode
)
3829 best_mode_size
= GET_MODE_BITSIZE (best_mode
);
3830 to_addr
= plus_constant (XEXP (to_rtx
, 0), bitpos
/ BITS_PER_UNIT
);
3831 size
= CEIL ((bitpos
% best_mode_size
) + bitsize
, best_mode_size
);
3832 size
*= GET_MODE_SIZE (best_mode
);
3834 /* Check the access right of the pointer. */
3835 in_check_memory_usage
= 1;
3837 emit_library_call (chkr_check_addr_libfunc
, LCT_CONST_MAKE_BLOCK
,
3838 VOIDmode
, 3, to_addr
, Pmode
,
3839 GEN_INT (size
), TYPE_MODE (sizetype
),
3840 GEN_INT (MEMORY_USE_WO
),
3841 TYPE_MODE (integer_type_node
));
3842 in_check_memory_usage
= 0;
3845 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3847 /* Spurious cast for HPUX compiler. */
3848 ? ((enum machine_mode
)
3849 TYPE_MODE (TREE_TYPE (to
)))
3851 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
3853 preserve_temp_slots (result
);
3857 /* If the value is meaningful, convert RESULT to the proper mode.
3858 Otherwise, return nothing. */
3859 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3860 TYPE_MODE (TREE_TYPE (from
)),
3862 TREE_UNSIGNED (TREE_TYPE (to
)))
3866 /* If the rhs is a function call and its value is not an aggregate,
3867 call the function before we start to compute the lhs.
3868 This is needed for correct code for cases such as
3869 val = setjmp (buf) on machines where reference to val
3870 requires loading up part of an address in a separate insn.
3872 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3873 since it might be a promoted variable where the zero- or sign- extension
3874 needs to be done. Handling this in the normal way is safe because no
3875 computation is done before the call. */
3876 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
3877 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3878 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3879 && GET_CODE (DECL_RTL (to
)) == REG
))
3884 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3886 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3888 /* Handle calls that return values in multiple non-contiguous locations.
3889 The Irix 6 ABI has examples of this. */
3890 if (GET_CODE (to_rtx
) == PARALLEL
)
3891 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)));
3892 else if (GET_MODE (to_rtx
) == BLKmode
)
3893 emit_block_move (to_rtx
, value
, expr_size (from
));
3896 #ifdef POINTERS_EXTEND_UNSIGNED
3897 if (POINTER_TYPE_P (TREE_TYPE (to
))
3898 && GET_MODE (to_rtx
) != GET_MODE (value
))
3899 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3901 emit_move_insn (to_rtx
, value
);
3903 preserve_temp_slots (to_rtx
);
3906 return want_value
? to_rtx
: NULL_RTX
;
3909 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3910 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3913 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3915 /* Don't move directly into a return register. */
3916 if (TREE_CODE (to
) == RESULT_DECL
3917 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3922 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3924 if (GET_CODE (to_rtx
) == PARALLEL
)
3925 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)));
3927 emit_move_insn (to_rtx
, temp
);
3929 preserve_temp_slots (to_rtx
);
3932 return want_value
? to_rtx
: NULL_RTX
;
3935 /* In case we are returning the contents of an object which overlaps
3936 the place the value is being stored, use a safe function when copying
3937 a value through a pointer into a structure value return block. */
3938 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3939 && current_function_returns_struct
3940 && !current_function_returns_pcc_struct
)
3945 size
= expr_size (from
);
3946 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
,
3947 EXPAND_MEMORY_USE_DONT
);
3949 /* Copy the rights of the bitmap. */
3950 if (current_function_check_memory_usage
)
3951 emit_library_call (chkr_copy_bitmap_libfunc
, LCT_CONST_MAKE_BLOCK
,
3952 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3953 XEXP (from_rtx
, 0), Pmode
,
3954 convert_to_mode (TYPE_MODE (sizetype
),
3955 size
, TREE_UNSIGNED (sizetype
)),
3956 TYPE_MODE (sizetype
));
3958 #ifdef TARGET_MEM_FUNCTIONS
3959 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3960 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3961 XEXP (from_rtx
, 0), Pmode
,
3962 convert_to_mode (TYPE_MODE (sizetype
),
3963 size
, TREE_UNSIGNED (sizetype
)),
3964 TYPE_MODE (sizetype
));
3966 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3967 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3968 XEXP (to_rtx
, 0), Pmode
,
3969 convert_to_mode (TYPE_MODE (integer_type_node
),
3970 size
, TREE_UNSIGNED (integer_type_node
)),
3971 TYPE_MODE (integer_type_node
));
3974 preserve_temp_slots (to_rtx
);
3977 return want_value
? to_rtx
: NULL_RTX
;
3980 /* Compute FROM and store the value in the rtx we got. */
3983 result
= store_expr (from
, to_rtx
, want_value
);
3984 preserve_temp_slots (result
);
3987 return want_value
? result
: NULL_RTX
;
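/* Added note on the COMPONENT_REF path above: for a bit-field store such
   as `s.f = x' where f is a 3-bit field, get_inner_reference reports
   bitsize 3 and a nonzero bitpos, and the assignment is carried out by
   store_field rather than by a plain emit_move_insn.  */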
3990 /* Generate code for computing expression EXP,
3991 and storing the value into TARGET.
3992 TARGET may contain a QUEUED rtx.
3994 If WANT_VALUE is nonzero, return a copy of the value
3995 not in TARGET, so that we can be sure to use the proper
3996 value in a containing expression even if TARGET has something
3997 else stored in it. If possible, we copy the value through a pseudo
3998 and return that pseudo. Or, if the value is constant, we try to
3999 return the constant. In some cases, we return a pseudo
4000 copied *from* TARGET.
   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?
4009 If WANT_VALUE is 0, we return NULL, to make sure
4010 to catch quickly any cases where the caller uses the value
4011 and fails to set WANT_VALUE. */
4014 store_expr (exp
, target
, want_value
)
4020 int dont_return_target
= 0;
4021 int dont_store_target
= 0;
4023 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4025 /* Perform first part of compound expression, then assign from second
4027 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
4029 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4031 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4033 /* For conditional expression, get safe form of the target. Then
4034 test the condition, doing the appropriate assignment on either
4035 side. This avoids the creation of unnecessary temporaries.
4036 For non-BLKmode, it is more efficient not to do this. */
4038 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4041 target
= protect_from_queue (target
, 1);
4043 do_pending_stack_adjust ();
4045 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4046 start_cleanup_deferral ();
4047 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
4048 end_cleanup_deferral ();
4050 emit_jump_insn (gen_jump (lab2
));
4053 start_cleanup_deferral ();
4054 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
4055 end_cleanup_deferral ();
4060 return want_value
? target
: NULL_RTX
;
4062 else if (queued_subexp_p (target
))
4063 /* If target contains a postincrement, let's not risk
4064 using it as the place to generate the rhs. */
4066 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4068 /* Expand EXP into a new pseudo. */
4069 temp
= gen_reg_rtx (GET_MODE (target
));
4070 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
4073 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
4075 /* If target is volatile, ANSI requires accessing the value
4076 *from* the target, if it is accessed. So make that happen.
4077 In no case return the target itself. */
4078 if (! MEM_VOLATILE_P (target
) && want_value
)
4079 dont_return_target
= 1;
4081 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
4082 && GET_MODE (target
) != BLKmode
)
4083 /* If target is in memory and caller wants value in a register instead,
4084 arrange that. Pass TARGET as target for expand_expr so that,
4085 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4086 We know expand_expr will not use the target in that case.
4087 Don't do this if TARGET is volatile because we are supposed
4088 to write it and then read it. */
4090 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4091 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4093 /* If TEMP is already in the desired TARGET, only copy it from
4094 memory and don't store it there again. */
4096 || (rtx_equal_p (temp
, target
)
4097 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4098 dont_store_target
= 1;
4099 temp
= copy_to_reg (temp
);
4101 dont_return_target
= 1;
4103 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
4109 /* If we don't want a value, we can do the conversion inside EXP,
4110 which will often result in some optimizations. Do the conversion
4111 in two steps: first change the signedness, if needed, then
4112 the extend. But don't do this if the type of EXP is a subtype
4113 of something else since then the conversion might involve
4114 more than just converting modes. */
4115 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4116 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4118 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4119 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4122 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
4126 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
4127 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4131 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4133 /* If TEMP is a volatile MEM and we want a result value, make
4134 the access now so it gets done only once. Likewise if
4135 it contains TARGET. */
4136 if (GET_CODE (temp
) == MEM
&& want_value
4137 && (MEM_VOLATILE_P (temp
)
4138 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
4139 temp
= copy_to_reg (temp
);
4141 /* If TEMP is a VOIDmode constant, use convert_modes to make
4142 sure that we properly convert it. */
4143 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4145 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4146 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4147 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4148 GET_MODE (target
), temp
,
4149 SUBREG_PROMOTED_UNSIGNED_P (target
));
4152 convert_move (SUBREG_REG (target
), temp
,
4153 SUBREG_PROMOTED_UNSIGNED_P (target
));
4155 /* If we promoted a constant, change the mode back down to match
4156 target. Otherwise, the caller might get confused by a result whose
4157 mode is larger than expected. */
4159 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
)
4160 && GET_MODE (temp
) != VOIDmode
)
4162 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4163 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4164 SUBREG_PROMOTED_UNSIGNED_P (temp
)
4165 = SUBREG_PROMOTED_UNSIGNED_P (target
);
4168 return want_value
? temp
: NULL_RTX
;
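  /* Added example for the promoted-SUBREG case above: a `short' variable
     kept in a full-word pseudo is computed in its declared mode (HImode
     on most targets) and then widened with convert_move according to
     SUBREG_PROMOTED_UNSIGNED_P (target).  */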
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || want_value))
        dont_return_target = 1;
    }
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TREE_UNSIGNED (TREE_TYPE (exp)));
  if (current_function_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      in_check_memory_usage = 1;
      if (GET_CODE (temp) == MEM)
        emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
                           VOIDmode, 3, XEXP (target, 0), Pmode,
                           XEXP (temp, 0), Pmode,
                           expr_size (exp), TYPE_MODE (sizetype));
      else
        emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
                           VOIDmode, 3, XEXP (target, 0), Pmode,
                           expr_size (exp), TYPE_MODE (sizetype),
                           GEN_INT (MEMORY_USE_WO),
                           TYPE_MODE (integer_type_node));
      in_check_memory_usage = 0;
    }
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.
             The string constant may be shorter than the array.
             So copy just the string's actual length, and clear the rest.  */
          rtx size;
          rtx addr;

          /* Get the size of the data type of the string,
             which is actually the size of the target.  */
          size = expr_size (exp);
          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size);
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
                                               VOIDmode, 0);
              rtx label = 0;

              /* Copy that much.  */
              emit_block_move (target, temp, copy_size_rtx);
              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */
              addr = XEXP (target, 0);
              addr = convert_modes (ptr_mode, Pmode, addr, 1);

              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
                  size = plus_constant (size, -TREE_STRING_LENGTH (exp));
                }
              else
                {
                  addr = force_reg (ptr_mode, addr);
                  addr = expand_binop (ptr_mode, add_optab, addr,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  size = expand_binop (ptr_mode, sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                {
                  rtx dest = gen_rtx_MEM (BLKmode, addr);

                  MEM_COPY_ATTRIBUTES (dest, target);

                  /* The residual likely does not have the same alignment
                     as the original target.  While we could compute the
                     alignment of the residual, it hardly seems worth
                     the effort.  */
                  set_mem_align (dest, BITS_PER_UNIT);

                  /* Be sure we can write on ADDR.  */
                  in_check_memory_usage = 1;
                  if (current_function_check_memory_usage)
                    emit_library_call (chkr_check_addr_libfunc,
                                       LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                                       addr, Pmode,
                                       size, TYPE_MODE (sizetype),
                                       GEN_INT (MEMORY_USE_WO),
                                       TYPE_MODE (integer_type_node));
                  in_check_memory_usage = 0;
                  clear_storage (dest, size);
                }

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp));
      else
        emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
        is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        if (! is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);

      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        {
          /* If there are no ranges of true bits, it is all zero.  */
          return elt == NULL_TREE;
        }

      for (; elt; elt = TREE_CHAIN (elt))
        {
          /* We do not handle the case where the index is a RANGE_EXPR,
             so the statistic will be somewhat inaccurate.
             We do make a more accurate count in store_constructor itself,
             so, since this function is only used for nested array elements,
             this should be close enough.  */
          if (mostly_zeros_p (TREE_VALUE (elt)))
            zeros++;
          elts++;
        }

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
                         alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyway.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (GET_CODE (target) == MEM)
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
                 alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zeroed.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
4502 store_constructor (exp
, target
, cleared
, size
)
4508 tree type
= TREE_TYPE (exp
);
4509 #ifdef WORD_REGISTER_OPERATIONS
4510 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4513 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4514 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4518 /* We either clear the aggregate or indicate the value is dead. */
4519 if ((TREE_CODE (type
) == UNION_TYPE
4520 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4522 && ! CONSTRUCTOR_ELTS (exp
))
4523 /* If the constructor is empty, clear the union. */
4525 clear_storage (target
, expr_size (exp
));
4529 /* If we are building a static constructor into a register,
4530 set the initial value as zero so we can fold the value into
4531 a constant. But if more than one register is involved,
4532 this probably loses. */
4533 else if (! cleared
&& GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4534 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4536 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4540 /* If the constructor has fewer fields than the structure
4541 or if we are initializing the structure to mostly zeros,
4542 clear the whole structure first. Don't do this if TARGET is a
4543 register whose mode size isn't equal to SIZE since clear_storage
4544 can't handle this case. */
4545 else if (! cleared
&& size
> 0
4546 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4547 != fields_length (type
))
4548 || mostly_zeros_p (exp
))
4549 && (GET_CODE (target
) != REG
4550 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4553 clear_storage (target
, GEN_INT (size
));
4558 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4560 /* Store each element of the constructor into
4561 the corresponding field of TARGET. */
4563 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4565 tree field
= TREE_PURPOSE (elt
);
4566 tree value
= TREE_VALUE (elt
);
4567 enum machine_mode mode
;
4568 HOST_WIDE_INT bitsize
;
4569 HOST_WIDE_INT bitpos
= 0;
4572 rtx to_rtx
= target
;
4574 /* Just ignore missing fields.
4575 We cleared the whole structure, above,
4576 if any fields are missing. */
4580 if (cleared
&& is_zeros_p (value
))
4583 if (host_integerp (DECL_SIZE (field
), 1))
4584 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4588 unsignedp
= TREE_UNSIGNED (field
);
4589 mode
= DECL_MODE (field
);
4590 if (DECL_BIT_FIELD (field
))
4593 offset
= DECL_FIELD_OFFSET (field
);
4594 if (host_integerp (offset
, 0)
4595 && host_integerp (bit_position (field
), 0))
4597 bitpos
= int_bit_position (field
);
4601 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4607 if (contains_placeholder_p (offset
))
4608 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4609 offset
, make_tree (TREE_TYPE (exp
), target
));
4611 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4612 if (GET_CODE (to_rtx
) != MEM
)
4615 if (GET_MODE (offset_rtx
) != ptr_mode
)
4616 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4618 #ifdef POINTERS_EXTEND_UNSIGNED
4619 if (GET_MODE (offset_rtx
) != Pmode
)
4620 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4623 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4624 highest_pow2_factor (offset
));
4627 if (TREE_READONLY (field
))
4629 if (GET_CODE (to_rtx
) == MEM
)
4630 to_rtx
= copy_rtx (to_rtx
);
4632 RTX_UNCHANGING_P (to_rtx
) = 1;
4635 #ifdef WORD_REGISTER_OPERATIONS
4636 /* If this initializes a field that is smaller than a word, at the
4637 start of a word, try to widen it to a full word.
4638 This special case allows us to output C++ member function
4639 initializations in a form that the optimizers can understand. */
4640 if (GET_CODE (target
) == REG
4641 && bitsize
< BITS_PER_WORD
4642 && bitpos
% BITS_PER_WORD
== 0
4643 && GET_MODE_CLASS (mode
) == MODE_INT
4644 && TREE_CODE (value
) == INTEGER_CST
4646 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4648 tree type
= TREE_TYPE (value
);
4650 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4652 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4653 value
= convert (type
, value
);
4656 if (BYTES_BIG_ENDIAN
)
4658 = fold (build (LSHIFT_EXPR
, type
, value
,
4659 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4660 bitsize
= BITS_PER_WORD
;
4665 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4666 && DECL_NONADDRESSABLE_P (field
))
4668 to_rtx
= copy_rtx (to_rtx
);
4669 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4672 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4673 value
, type
, cleared
,
4674 get_alias_set (TREE_TYPE (field
)));
4677 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4682 tree domain
= TYPE_DOMAIN (type
);
4683 tree elttype
= TREE_TYPE (type
);
4684 int const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4685 && TYPE_MAX_VALUE (domain
)
4686 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4687 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4688 HOST_WIDE_INT minelt
= 0;
4689 HOST_WIDE_INT maxelt
= 0;
4691 /* If we have constant bounds for the range of the type, get them. */
4694 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4695 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4698 /* If the constructor has fewer elements than the array,
4699 clear the whole array first. Similarly if this is
4700 static constructor of a non-BLKmode object. */
4701 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4705 HOST_WIDE_INT count
= 0, zero_count
= 0;
4706 need_to_clear
= ! const_bounds_p
;
4708 /* This loop is a more accurate version of the loop in
4709 mostly_zeros_p (it handles RANGE_EXPR in an index).
4710 It is also needed to check for missing elements. */
4711 for (elt
= CONSTRUCTOR_ELTS (exp
);
4712 elt
!= NULL_TREE
&& ! need_to_clear
;
4713 elt
= TREE_CHAIN (elt
))
4715 tree index
= TREE_PURPOSE (elt
);
4716 HOST_WIDE_INT this_node_count
;
4718 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4720 tree lo_index
= TREE_OPERAND (index
, 0);
4721 tree hi_index
= TREE_OPERAND (index
, 1);
4723 if (! host_integerp (lo_index
, 1)
4724 || ! host_integerp (hi_index
, 1))
4730 this_node_count
= (tree_low_cst (hi_index
, 1)
4731 - tree_low_cst (lo_index
, 1) + 1);
4734 this_node_count
= 1;
4736 count
+= this_node_count
;
4737 if (mostly_zeros_p (TREE_VALUE (elt
)))
4738 zero_count
+= this_node_count
;
4741 /* Clear the entire array first if there are any missing elements,
4742 or if the incidence of zero elements is >= 75%. */
4744 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4748 if (need_to_clear
&& size
> 0)
4751 clear_storage (target
, GEN_INT (size
));
4754 else if (REG_P (target
))
4755 /* Inform later passes that the old value is dead. */
4756 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4758 /* Store each element of the constructor into
4759 the corresponding element of TARGET, determined
4760 by counting the elements. */
4761 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4763 elt
= TREE_CHAIN (elt
), i
++)
4765 enum machine_mode mode
;
4766 HOST_WIDE_INT bitsize
;
4767 HOST_WIDE_INT bitpos
;
4769 tree value
= TREE_VALUE (elt
);
4770 tree index
= TREE_PURPOSE (elt
);
4771 rtx xtarget
= target
;
4773 if (cleared
&& is_zeros_p (value
))
4776 unsignedp
= TREE_UNSIGNED (elttype
);
4777 mode
= TYPE_MODE (elttype
);
4778 if (mode
== BLKmode
)
4779 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4780 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4783 bitsize
= GET_MODE_BITSIZE (mode
);
4785 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4787 tree lo_index
= TREE_OPERAND (index
, 0);
4788 tree hi_index
= TREE_OPERAND (index
, 1);
4789 rtx index_r
, pos_rtx
, hi_r
, loop_top
, loop_end
;
4790 struct nesting
*loop
;
4791 HOST_WIDE_INT lo
, hi
, count
;
4794 /* If the range is constant and "small", unroll the loop. */
4796 && host_integerp (lo_index
, 0)
4797 && host_integerp (hi_index
, 0)
4798 && (lo
= tree_low_cst (lo_index
, 0),
4799 hi
= tree_low_cst (hi_index
, 0),
4800 count
= hi
- lo
+ 1,
4801 (GET_CODE (target
) != MEM
4803 || (host_integerp (TYPE_SIZE (elttype
), 1)
4804 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4807 lo
-= minelt
; hi
-= minelt
;
4808 for (; lo
<= hi
; lo
++)
4810 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4812 if (GET_CODE (target
) == MEM
4813 && !MEM_KEEP_ALIAS_SET_P (target
)
4814 && TYPE_NONALIASED_COMPONENT (type
))
4816 target
= copy_rtx (target
);
4817 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4820 store_constructor_field
4821 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4822 get_alias_set (elttype
));
4827 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4828 loop_top
= gen_label_rtx ();
4829 loop_end
= gen_label_rtx ();
4831 unsignedp
= TREE_UNSIGNED (domain
);
4833 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4836 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4838 SET_DECL_RTL (index
, index_r
);
4839 if (TREE_CODE (value
) == SAVE_EXPR
4840 && SAVE_EXPR_RTL (value
) == 0)
4842 /* Make sure value gets expanded once before the
4844 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4847 store_expr (lo_index
, index_r
, 0);
4848 loop
= expand_start_loop (0);
4850 /* Assign value to element index. */
4852 = convert (ssizetype
,
4853 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4854 index
, TYPE_MIN_VALUE (domain
))));
4855 position
= size_binop (MULT_EXPR
, position
,
4857 TYPE_SIZE_UNIT (elttype
)));
4859 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4860 xtarget
= offset_address (target
, pos_rtx
,
4861 highest_pow2_factor (position
));
4862 xtarget
= adjust_address (xtarget
, mode
, 0);
4863 if (TREE_CODE (value
) == CONSTRUCTOR
)
4864 store_constructor (value
, xtarget
, cleared
,
4865 bitsize
/ BITS_PER_UNIT
);
4867 store_expr (value
, xtarget
, 0);
4869 expand_exit_loop_if_false (loop
,
4870 build (LT_EXPR
, integer_type_node
,
4873 expand_increment (build (PREINCREMENT_EXPR
,
4875 index
, integer_one_node
), 0, 0);
4877 emit_label (loop_end
);
4880 else if ((index
!= 0 && ! host_integerp (index
, 0))
4881 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4886 index
= ssize_int (1);
4889 index
= convert (ssizetype
,
4890 fold (build (MINUS_EXPR
, index
,
4891 TYPE_MIN_VALUE (domain
))));
4893 position
= size_binop (MULT_EXPR
, index
,
4895 TYPE_SIZE_UNIT (elttype
)));
4896 xtarget
= offset_address (target
,
4897 expand_expr (position
, 0, VOIDmode
, 0),
4898 highest_pow2_factor (position
));
4899 xtarget
= adjust_address (xtarget
, mode
, 0);
4900 store_expr (value
, xtarget
, 0);
4905 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4906 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4908 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4910 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
4911 && TYPE_NONALIASED_COMPONENT (type
))
4913 target
= copy_rtx (target
);
4914 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4917 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4918 type
, cleared
, get_alias_set (elttype
));
4924 /* Set constructor assignments. */
4925 else if (TREE_CODE (type
) == SET_TYPE
)
4927 tree elt
= CONSTRUCTOR_ELTS (exp
);
4928 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4929 tree domain
= TYPE_DOMAIN (type
);
4930 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the whole set (using bzero/memset), and
         then set the bits we want.  */
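
      /* Sketch of the constant-part handling below: bits obtained from
         get_set_constructor_bits are packed into WORD one at a time (from
         the most significant end on big-endian targets), and each time
         SET_WORD_SIZE bits have accumulated the word is flushed to the
         target with emit_move_insn before packing resumes at the next byte
         offset.  */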
4942 /* Check for all zeros. */
4943 if (elt
== NULL_TREE
&& size
> 0)
4946 clear_storage (target
, GEN_INT (size
));
4950 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4951 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4952 bitlength
= size_binop (PLUS_EXPR
,
4953 size_diffop (domain_max
, domain_min
),
4956 nbits
= tree_low_cst (bitlength
, 1);
4958 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4959 are "complicated" (more than one range), initialize (the
4960 constant parts) by copying from a constant. */
4961 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4962 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4964 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4965 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4966 char *bit_buffer
= (char *) alloca (nbits
);
4967 HOST_WIDE_INT word
= 0;
4968 unsigned int bit_pos
= 0;
4969 unsigned int ibit
= 0;
4970 unsigned int offset
= 0; /* In bytes from beginning of set. */
4972 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4975 if (bit_buffer
[ibit
])
4977 if (BYTES_BIG_ENDIAN
)
4978 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4980 word
|= 1 << bit_pos
;
4984 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4986 if (word
!= 0 || ! cleared
)
4988 rtx datum
= GEN_INT (word
);
4991 /* The assumption here is that it is safe to use
4992 XEXP if the set is multi-word, but not if
4993 it's single-word. */
4994 if (GET_CODE (target
) == MEM
)
4995 to_rtx
= adjust_address (target
, mode
, offset
);
4996 else if (offset
== 0)
5000 emit_move_insn (to_rtx
, datum
);
5007 offset
+= set_word_size
/ BITS_PER_UNIT
;
5012 /* Don't bother clearing storage if the set is all ones. */
5013 if (TREE_CHAIN (elt
) != NULL_TREE
5014 || (TREE_PURPOSE (elt
) == NULL_TREE
5016 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5017 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5018 || (tree_low_cst (TREE_VALUE (elt
), 0)
5019 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5020 != (HOST_WIDE_INT
) nbits
))))
5021 clear_storage (target
, expr_size (exp
));
5023 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5025 /* Start of range of element or NULL. */
5026 tree startbit
= TREE_PURPOSE (elt
);
5027 /* End of range of element, or element value. */
5028 tree endbit
= TREE_VALUE (elt
);
5029 #ifdef TARGET_MEM_FUNCTIONS
5030 HOST_WIDE_INT startb
, endb
;
5032 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5034 bitlength_rtx
= expand_expr (bitlength
,
5035 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5037 /* Handle non-range tuple element like [ expr ]. */
5038 if (startbit
== NULL_TREE
)
5040 startbit
= save_expr (endbit
);
5044 startbit
= convert (sizetype
, startbit
);
5045 endbit
= convert (sizetype
, endbit
);
5046 if (! integer_zerop (domain_min
))
5048 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5049 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5051 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5052 EXPAND_CONST_ADDRESS
);
5053 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5054 EXPAND_CONST_ADDRESS
);
5060 ((build_qualified_type (type_for_mode (GET_MODE (target
), 0),
5063 emit_move_insn (targetx
, target
);
5066 else if (GET_CODE (target
) == MEM
)
5071 #ifdef TARGET_MEM_FUNCTIONS
5072 /* Optimization: If startbit and endbit are
5073 constants divisible by BITS_PER_UNIT,
5074 call memset instead. */
5075 if (TREE_CODE (startbit
) == INTEGER_CST
5076 && TREE_CODE (endbit
) == INTEGER_CST
5077 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5078 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5080 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5082 plus_constant (XEXP (targetx
, 0),
5083 startb
/ BITS_PER_UNIT
),
5085 constm1_rtx
, TYPE_MODE (integer_type_node
),
5086 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5087 TYPE_MODE (sizetype
));
5091 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
5092 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
5093 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5094 startbit_rtx
, TYPE_MODE (sizetype
),
5095 endbit_rtx
, TYPE_MODE (sizetype
));
5098 emit_move_insn (target
, targetx
);
5106 /* Store the value of EXP (an expression tree)
5107 into a subfield of TARGET which has mode MODE and occupies
5108 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5109 If MODE is VOIDmode, it means that we are storing into a bit-field.
5111 If VALUE_MODE is VOIDmode, return nothing in particular.
5112 UNSIGNEDP is not used in this case.
5114 Otherwise, return an rtx for the value stored. This rtx
5115 has mode VALUE_MODE if that is convenient to do.
5116 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5118 TYPE is the type of the underlying object,
5120 ALIAS_SET is the alias set for the destination. This value will
5121 (in general) be different from that for TARGET, since TARGET is a
5122 reference to the containing structure. */
5125 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
, unsignedp
, type
,
5128 HOST_WIDE_INT bitsize
;
5129 HOST_WIDE_INT bitpos
;
5130 enum machine_mode mode
;
5132 enum machine_mode value_mode
;
5137 HOST_WIDE_INT width_mask
= 0;
5139 if (TREE_CODE (exp
) == ERROR_MARK
)
  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5149 /* If we are storing into an unaligned field of an aligned union that is
5150 in a register, we may have the mode of TARGET being an integer mode but
5151 MODE == BLKmode. In that case, get an aligned object whose size and
5152 alignment are the same as TARGET and store TARGET into it (we can avoid
5153 the store if the field being stored is the entire width of TARGET). Then
5154 call ourselves recursively to store the field into a BLKmode version of
5155 that object. Finally, load from the object into TARGET. This is not
5156 very efficient in general, but should only be slightly more expensive
5157 than the otherwise-required unaligned accesses. Perhaps this can be
5158 cleaned up later. */
5161 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5165 (build_qualified_type (type
, TYPE_QUALS (type
) | TYPE_QUAL_CONST
),
5167 rtx blk_object
= copy_rtx (object
);
5169 PUT_MODE (blk_object
, BLKmode
);
5171 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5172 emit_move_insn (object
, target
);
5174 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5177 emit_move_insn (target
, object
);
5179 /* We want to return the BLKmode version of the data. */
5183 if (GET_CODE (target
) == CONCAT
)
5185 /* We're storing into a struct containing a single __complex. */
5189 return store_expr (exp
, target
, 0);
5192 /* If the structure is in a register or if the component
5193 is a bit field, we cannot use addressing to access it.
5194 Use bit-field techniques or SUBREG to store in it. */
5196 if (mode
== VOIDmode
5197 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5198 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5199 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5200 || GET_CODE (target
) == REG
5201 || GET_CODE (target
) == SUBREG
5202 /* If the field isn't aligned enough to store as an ordinary memref,
5203 store it as a bit field. */
5204 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
5205 && (MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
)
5206 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
5207 /* If the RHS and field are a constant size and the size of the
5208 RHS isn't the same size as the bitfield, we must use bitfield
5211 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5212 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5214 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         a big-endian machine, we want the upper BITSIZE bits.  */
5220 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5221 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
5222 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5223 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5224 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5228 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5230 if (mode
!= VOIDmode
&& mode
!= BLKmode
5231 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5232 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5234 /* If the modes of TARGET and TEMP are both BLKmode, both
5235 must be in memory and BITPOS must be aligned on a byte
5236 boundary. If so, we simply do a block copy. */
5237 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5239 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5240 || bitpos
% BITS_PER_UNIT
!= 0)
5243 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5244 emit_block_move (target
, temp
,
5245 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5248 return value_mode
== VOIDmode
? const0_rtx
: target
;
5251 /* Store the value in the bitfield. */
5252 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5253 int_size_in_bytes (type
));
5255 if (value_mode
!= VOIDmode
)
5257 /* The caller wants an rtx for the value.
5258 If possible, avoid refetching from the bitfield itself. */
5260 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5263 enum machine_mode tmode
;
5266 return expand_and (temp
,
5270 GET_MODE (temp
) == VOIDmode
5272 : GET_MODE (temp
))), NULL_RTX
);
5274 tmode
= GET_MODE (temp
);
5275 if (tmode
== VOIDmode
)
5277 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5278 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5279 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5282 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5283 NULL_RTX
, value_mode
, VOIDmode
,
5284 int_size_in_bytes (type
));
5290 rtx addr
= XEXP (target
, 0);
5291 rtx to_rtx
= target
;
5293 /* If a value is wanted, it must be the lhs;
5294 so make the address stable for multiple use. */
5296 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5297 && ! CONSTANT_ADDRESS_P (addr
)
5298 /* A frame-pointer reference is already stable. */
5299 && ! (GET_CODE (addr
) == PLUS
5300 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5301 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5302 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5303 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5305 /* Now build a reference to just the desired component. */
5307 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5309 if (to_rtx
== target
)
5310 to_rtx
= copy_rtx (to_rtx
);
5312 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5313 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5314 set_mem_alias_set (to_rtx
, alias_set
);
5316 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5320 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5321 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5322 codes and find the ultimate containing object, which we return.
5324 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5325 bit position, and *PUNSIGNEDP to the signedness of the field.
5326 If the position of the field is variable, we store a tree
5327 giving the variable offset (in units) in *POFFSET.
5328 This offset is in addition to the bit position.
5329 If the position is not variable, we store 0 in *POFFSET.
5331 If any of the extraction expressions is volatile,
5332 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5334 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5335 is a mode that can be used to access the field. In that case, *PBITSIZE
5338 If the field describes a variable-sized object, *PMODE is set to
5339 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5340 this case, but the address of the object can be found. */
5343 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5344 punsignedp
, pvolatilep
)
5346 HOST_WIDE_INT
*pbitsize
;
5347 HOST_WIDE_INT
*pbitpos
;
5349 enum machine_mode
*pmode
;
5354 enum machine_mode mode
= VOIDmode
;
5355 tree offset
= size_zero_node
;
5356 tree bit_offset
= bitsize_zero_node
;
5357 tree placeholder_ptr
= 0;
5360 /* First get the mode, signedness, and size. We do this from just the
5361 outermost expression. */
5362 if (TREE_CODE (exp
) == COMPONENT_REF
)
5364 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5365 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5366 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5368 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5370 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5372 size_tree
= TREE_OPERAND (exp
, 1);
5373 *punsignedp
= TREE_UNSIGNED (exp
);
5377 mode
= TYPE_MODE (TREE_TYPE (exp
));
5378 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5380 if (mode
== BLKmode
)
5381 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5383 *pbitsize
= GET_MODE_BITSIZE (mode
);
5388 if (! host_integerp (size_tree
, 1))
5389 mode
= BLKmode
, *pbitsize
= -1;
5391 *pbitsize
= tree_low_cst (size_tree
, 1);
5394 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5395 and find the ultimate containing object. */
5398 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5399 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5400 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5402 tree field
= TREE_OPERAND (exp
, 1);
5403 tree this_offset
= DECL_FIELD_OFFSET (field
);
5405 /* If this field hasn't been filled in yet, don't go
5406 past it. This should only happen when folding expressions
5407 made during type construction. */
5408 if (this_offset
== 0)
5410 else if (! TREE_CONSTANT (this_offset
)
5411 && contains_placeholder_p (this_offset
))
5412 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5414 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5415 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5416 DECL_FIELD_BIT_OFFSET (field
));
5418 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5421 else if (TREE_CODE (exp
) == ARRAY_REF
5422 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5424 tree index
= TREE_OPERAND (exp
, 1);
5425 tree array
= TREE_OPERAND (exp
, 0);
5426 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5427 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5428 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5430 /* We assume all arrays have sizes that are a multiple of a byte.
5431 First subtract the lower bound, if any, in the type of the
5432 index, then convert to sizetype and multiply by the size of the
5434 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5435 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5438 /* If the index has a self-referential type, pass it to a
5439 WITH_RECORD_EXPR; if the component size is, pass our
5440 component to one. */
5441 if (! TREE_CONSTANT (index
)
5442 && contains_placeholder_p (index
))
5443 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5444 if (! TREE_CONSTANT (unit_size
)
5445 && contains_placeholder_p (unit_size
))
5446 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5448 offset
= size_binop (PLUS_EXPR
, offset
,
5449 size_binop (MULT_EXPR
,
5450 convert (sizetype
, index
),
5454 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5456 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5458 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5459 We might have been called from tree optimization where we
5460 haven't set up an object yet. */
5468 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5469 && TREE_CODE (exp
) != VIEW_CONVERT_EXPR
5470 && ! ((TREE_CODE (exp
) == NOP_EXPR
5471 || TREE_CODE (exp
) == CONVERT_EXPR
)
5472 && (TYPE_MODE (TREE_TYPE (exp
))
5473 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5476 /* If any reference in the chain is volatile, the effect is volatile. */
5477 if (TREE_THIS_VOLATILE (exp
))
5480 exp
= TREE_OPERAND (exp
, 0);
5483 /* If OFFSET is constant, see if we can return the whole thing as a
5484 constant bit position. Otherwise, split it up. */
5485 if (host_integerp (offset
, 0)
5486 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5488 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5489 && host_integerp (tem
, 0))
5490 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5492 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5498 /* Return 1 if T is an expression that get_inner_reference handles. */
5501 handled_component_p (t
)
5504 switch (TREE_CODE (t
))
5509 case ARRAY_RANGE_REF
:
5510 case NON_LVALUE_EXPR
:
5511 case VIEW_CONVERT_EXPR
:
5516 return (TYPE_MODE (TREE_TYPE (t
))
5517 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
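
/* Note: the component codes accepted by handled_component_p above mirror
   the ones get_inner_reference strips in its loop, so the two functions
   must be kept in sync when new reference codes are added.  */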
/* Subroutine of expand_expr: compute memory_usage from modifier.  */
5526 static enum memory_use_mode
5527 get_memory_usage_from_modifier (modifier
)
5528 enum expand_modifier modifier
;
5534 return MEMORY_USE_RO
;
5536 case EXPAND_MEMORY_USE_WO
:
5537 return MEMORY_USE_WO
;
5539 case EXPAND_MEMORY_USE_RW
:
5540 return MEMORY_USE_RW
;
5542 case EXPAND_MEMORY_USE_DONT
:
5543 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5544 MEMORY_USE_DONT, because they are modifiers to a call of
5545 expand_expr in the ADDR_EXPR case of expand_expr. */
5546 case EXPAND_CONST_ADDRESS
:
5547 case EXPAND_INITIALIZER
:
5548 return MEMORY_USE_DONT
;
5549 case EXPAND_MEMORY_USE_BAD
:
5555 /* Given an rtx VALUE that may contain additions and multiplications, return
5556 an equivalent value that just refers to a register, memory, or constant.
5557 This is done by generating instructions to perform the arithmetic and
5558 returning a pseudo-register containing the value.
5560 The returned value may be a REG, SUBREG, MEM or constant. */
5563 force_operand (value
, target
)
5567 /* Use a temporary to force order of execution of calls to
5571 /* Use subtarget as the target for operand 0 of a binary operation. */
5572 rtx subtarget
= get_subtarget (target
);
5574 /* Check for a PIC address load. */
5576 && (GET_CODE (value
) == PLUS
|| GET_CODE (value
) == MINUS
)
5577 && XEXP (value
, 0) == pic_offset_table_rtx
5578 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5579 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5580 || GET_CODE (XEXP (value
, 1)) == CONST
))
5583 subtarget
= gen_reg_rtx (GET_MODE (value
));
5584 emit_move_insn (subtarget
, value
);
5588 if (GET_CODE (value
) == PLUS
)
5589 binoptab
= add_optab
;
5590 else if (GET_CODE (value
) == MINUS
)
5591 binoptab
= sub_optab
;
5592 else if (GET_CODE (value
) == MULT
)
5594 op2
= XEXP (value
, 1);
5595 if (!CONSTANT_P (op2
)
5596 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5598 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5599 return expand_mult (GET_MODE (value
), tmp
,
5600 force_operand (op2
, NULL_RTX
),
5606 op2
= XEXP (value
, 1);
5607 if (!CONSTANT_P (op2
)
5608 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5610 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
5612 binoptab
= add_optab
;
5613 op2
= negate_rtx (GET_MODE (value
), op2
);
5616 /* Check for an addition with OP2 a constant integer and our first
5617 operand a PLUS of a virtual register and something else. In that
5618 case, we want to emit the sum of the virtual register and the
5619 constant first and then add the other value. This allows virtual
5620 register instantiation to simply modify the constant rather than
5621 creating another one around this addition. */
5622 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
5623 && GET_CODE (XEXP (value
, 0)) == PLUS
5624 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5625 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5626 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5628 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
5629 XEXP (XEXP (value
, 0), 0), op2
,
5630 subtarget
, 0, OPTAB_LIB_WIDEN
);
5631 return expand_binop (GET_MODE (value
), binoptab
, temp
,
5632 force_operand (XEXP (XEXP (value
, 0), 1), 0),
5633 target
, 0, OPTAB_LIB_WIDEN
);
5636 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5637 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
5638 force_operand (op2
, NULL_RTX
),
5639 target
, 0, OPTAB_LIB_WIDEN
);
5640 /* We give UNSIGNEDP = 0 to expand_binop
5641 because the only operations we are expanding here are signed ones. */
5644 #ifdef INSN_SCHEDULING
5645 /* On machines that have insn scheduling, we want all memory reference to be
5646 explicit, so we need to deal with such paradoxical SUBREGs. */
5647 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5648 && (GET_MODE_SIZE (GET_MODE (value
))
5649 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5651 = simplify_gen_subreg (GET_MODE (value
),
5652 force_reg (GET_MODE (SUBREG_REG (value
)),
5653 force_operand (SUBREG_REG (value
),
5655 GET_MODE (SUBREG_REG (value
)),
5656 SUBREG_BYTE (value
));
5662 /* Subroutine of expand_expr: return nonzero iff there is no way that
5663 EXP can reference X, which is being modified. TOP_P is nonzero if this
5664 call is going to be used to determine whether we need a temporary
5665 for EXP, as opposed to a recursive call to this function.
5667 It is always safe for this routine to return zero since it merely
5668 searches for optimization opportunities. */
5671 safe_from_p (x
, exp
, top_p
)
5678 static tree save_expr_list
;
5681 /* If EXP has varying size, we MUST use a target since we currently
5682 have no way of allocating temporaries of variable size
5683 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5684 So we assume here that something at a higher level has prevented a
5685 clash. This is somewhat bogus, but the best we can do. Only
5686 do this when X is BLKmode and when we are at the top level. */
5687 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5688 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5689 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5690 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5691 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5693 && GET_MODE (x
) == BLKmode
)
5694 /* If X is in the outgoing argument area, it is always safe. */
5695 || (GET_CODE (x
) == MEM
5696 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5697 || (GET_CODE (XEXP (x
, 0)) == PLUS
5698 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5701 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5702 find the underlying pseudo. */
5703 if (GET_CODE (x
) == SUBREG
)
5706 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5710 /* A SAVE_EXPR might appear many times in the expression passed to the
5711 top-level safe_from_p call, and if it has a complex subexpression,
5712 examining it multiple times could result in a combinatorial explosion.
5713 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5714 with optimization took about 28 minutes to compile -- even though it was
5715 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5716 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5717 we have processed. Note that the only test of top_p was above. */
5726 rtn
= safe_from_p (x
, exp
, 0);
5728 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5729 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5734 /* Now look at our tree code and possibly recurse. */
5735 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5738 exp_rtl
= DECL_RTL_SET_P (exp
) ? DECL_RTL (exp
) : NULL_RTX
;
5745 if (TREE_CODE (exp
) == TREE_LIST
)
5746 return ((TREE_VALUE (exp
) == 0
5747 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5748 && (TREE_CHAIN (exp
) == 0
5749 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5750 else if (TREE_CODE (exp
) == ERROR_MARK
)
5751 return 1; /* An already-visited SAVE_EXPR? */
5756 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5760 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5761 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5765 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5766 the expression. If it is set, we conflict iff we are that rtx or
5767 both are in memory. Otherwise, we check all operands of the
5768 expression recursively. */
5770 switch (TREE_CODE (exp
))
5773 /* If the operand is static or we are static, we can't conflict.
5774 Likewise if we don't conflict with the operand at all. */
5775 if (staticp (TREE_OPERAND (exp
, 0))
5776 || TREE_STATIC (exp
)
5777 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
      /* Otherwise, the only way this can conflict is if we are taking
         the address of a DECL whose address is part of X, which is
         very rare.  */
5783 exp
= TREE_OPERAND (exp
, 0);
5786 if (!DECL_RTL_SET_P (exp
)
5787 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5790 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5795 if (GET_CODE (x
) == MEM
5796 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5797 get_alias_set (exp
)))
5802 /* Assume that the call will clobber all hard registers and
5804 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5805 || GET_CODE (x
) == MEM
)
5810 /* If a sequence exists, we would have to scan every instruction
5811 in the sequence to see if it was safe. This is probably not
5813 if (RTL_EXPR_SEQUENCE (exp
))
5816 exp_rtl
= RTL_EXPR_RTL (exp
);
5819 case WITH_CLEANUP_EXPR
:
5820 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
5823 case CLEANUP_POINT_EXPR
:
5824 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5827 exp_rtl
= SAVE_EXPR_RTL (exp
);
5831 /* If we've already scanned this, don't do it again. Otherwise,
5832 show we've scanned it and record for clearing the flag if we're
5834 if (TREE_PRIVATE (exp
))
5837 TREE_PRIVATE (exp
) = 1;
5838 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5840 TREE_PRIVATE (exp
) = 0;
5844 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5848 /* The only operand we look at is operand 1. The rest aren't
5849 part of the expression. */
5850 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5852 case METHOD_CALL_EXPR
:
5853 /* This takes an rtx argument, but shouldn't appear here. */
5860 /* If we have an rtx, we do not need to scan our operands. */
5864 nops
= first_rtl_op (TREE_CODE (exp
));
5865 for (i
= 0; i
< nops
; i
++)
5866 if (TREE_OPERAND (exp
, i
) != 0
5867 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5870 /* If this is a language-specific tree code, it may require
5871 special handling. */
5872 if ((unsigned int) TREE_CODE (exp
)
5873 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5874 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
5878 /* If we have an rtl, find any enclosed object. Then see if we conflict
5882 if (GET_CODE (exp_rtl
) == SUBREG
)
5884 exp_rtl
= SUBREG_REG (exp_rtl
);
5885 if (GET_CODE (exp_rtl
) == REG
5886 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5890 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5891 are memory and they conflict. */
5892 return ! (rtx_equal_p (x
, exp_rtl
)
5893 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5894 && true_dependence (exp_rtl
, GET_MODE (x
), x
,
5895 rtx_addr_varies_p
)));
5898 /* If we reach here, it is safe. */
5902 /* Subroutine of expand_expr: return rtx if EXP is a
5903 variable or parameter; else return 0. */
5910 switch (TREE_CODE (exp
))
5914 return DECL_RTL (exp
);
5920 #ifdef MAX_INTEGER_COMPUTATION_MODE
5923 check_max_integer_computation_mode (exp
)
5926 enum tree_code code
;
5927 enum machine_mode mode
;
5929 /* Strip any NOPs that don't change the mode. */
5931 code
= TREE_CODE (exp
);
5933 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5934 if (code
== NOP_EXPR
5935 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5938 /* First check the type of the overall operation. We need only look at
5939 unary, binary and relational operations. */
5940 if (TREE_CODE_CLASS (code
) == '1'
5941 || TREE_CODE_CLASS (code
) == '2'
5942 || TREE_CODE_CLASS (code
) == '<')
5944 mode
= TYPE_MODE (TREE_TYPE (exp
));
5945 if (GET_MODE_CLASS (mode
) == MODE_INT
5946 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5947 internal_error ("unsupported wide integer operation");
5950 /* Check operand of a unary op. */
5951 if (TREE_CODE_CLASS (code
) == '1')
5953 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5954 if (GET_MODE_CLASS (mode
) == MODE_INT
5955 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5956 internal_error ("unsupported wide integer operation");
5959 /* Check operands of a binary/comparison op. */
5960 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
5962 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5963 if (GET_MODE_CLASS (mode
) == MODE_INT
5964 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5965 internal_error ("unsupported wide integer operation");
5967 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
5968 if (GET_MODE_CLASS (mode
) == MODE_INT
5969 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5970 internal_error ("unsupported wide integer operation");
5975 /* Return the highest power of two that EXP is known to be a multiple of.
5976 This is used in updating alignment of MEMs in array references. */
5978 static HOST_WIDE_INT
5979 highest_pow2_factor (exp
)
5982 HOST_WIDE_INT c0
, c1
;
5984 switch (TREE_CODE (exp
))
      /* If the integer is expressible in a HOST_WIDE_INT, we can find the
         lowest bit that's a one.  If the result is zero, pessimize by
         returning 1.  This is overly conservative, but such things should not
         happen in the offset expressions that we are called with.  */
5991 if (host_integerp (exp
, 0))
5993 c0
= tree_low_cst (exp
, 0);
5994 c0
= c0
< 0 ? - c0
: c0
;
5995 return c0
!= 0 ? c0
& -c0
: 1;
5999 case PLUS_EXPR
: case MINUS_EXPR
:
6000 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6001 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6002 return MIN (c0
, c1
);
6005 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6006 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6009 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6011 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6012 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6013 return MAX (1, c0
/ c1
);
6015 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6016 case COMPOUND_EXPR
: case SAVE_EXPR
: case WITH_RECORD_EXPR
:
6017 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6020 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6021 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6022 return MIN (c0
, c1
);
6031 /* Return an object on the placeholder list that matches EXP, a
6032 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6033 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6034 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6035 is a location which initially points to a starting location in the
6036 placeholder list (zero means start of the list) and where a pointer into
6037 the placeholder list at which the object is found is placed. */
6040 find_placeholder (exp
, plist
)
6044 tree type
= TREE_TYPE (exp
);
6045 tree placeholder_expr
;
6047 for (placeholder_expr
6048 = plist
&& *plist
? TREE_CHAIN (*plist
) : placeholder_list
;
6049 placeholder_expr
!= 0;
6050 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
6052 tree need_type
= TYPE_MAIN_VARIANT (type
);
6055 /* Find the outermost reference that is of the type we want. If none,
6056 see if any object has a type that is a pointer to the type we
6058 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6059 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
6060 || TREE_CODE (elt
) == COND_EXPR
)
6061 ? TREE_OPERAND (elt
, 1)
6062 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6063 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6064 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6065 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6066 ? TREE_OPERAND (elt
, 0) : 0))
6067 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
6070 *plist
= placeholder_expr
;
6074 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6076 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6077 || TREE_CODE (elt
) == COND_EXPR
)
6078 ? TREE_OPERAND (elt
, 1)
6079 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6080 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6081 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6082 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6083 ? TREE_OPERAND (elt
, 0) : 0))
6084 if (POINTER_TYPE_P (TREE_TYPE (elt
))
6085 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
6089 *plist
= placeholder_expr
;
6090 return build1 (INDIRECT_REF
, need_type
, elt
);
6097 /* expand_expr: generate code for computing expression EXP.
6098 An rtx for the computed value is returned. The value is never null.
6099 In the case of a void EXP, const0_rtx is returned.
6101 The value may be stored in TARGET if TARGET is nonzero.
6102 TARGET is just a suggestion; callers must assume that
6103 the rtx returned may not be the same as TARGET.
6105 If TARGET is CONST0_RTX, it means that the value will be ignored.
6107 If TMODE is not VOIDmode, it suggests generating the
6108 result in mode TMODE. But this is done only when convenient.
6109 Otherwise, TMODE is ignored and the value generated in its natural mode.
6110 TMODE is just a suggestion; callers must assume that
6111 the rtx returned may not have mode TMODE.
6113 Note that TARGET may have neither TMODE nor MODE. In that case, it
6114 probably will not be used.
6116 If MODIFIER is EXPAND_SUM then when EXP is an addition
6117 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6118 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6119 products as above, or REG or MEM, or constant.
6120 Ordinarily in such cases we would output mul or add instructions
6121 and then return a pseudo reg containing the sum.
6123 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6124 it also marks a label as absolutely required (it can't be dead).
6125 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6126 This is used for outputting expressions used in initializers.
6128 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6129 with a constant address even if that address is not normally legitimate.
6130 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
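
/* For example, under EXPAND_SUM the address computation for `a[i]' may be
   returned as a nest such as (plus (mult (reg i') (const_int 4))
   (symbol_ref a)) rather than as a single pseudo holding the finished sum;
   callers passing EXPAND_SUM must be prepared to accept such forms.  */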
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
        return op0;
      return const0_rtx;
    }
  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                 || code == CONVERT_EXPR || code == REFERENCE_EXPR
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
          || code == INDIRECT_REF || code == BUFFER_REF)
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                       ro_modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
                       ro_modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, ro_modifier);
      else if (code == BIT_FIELD_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                       ro_modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
                       ro_modifier);
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
                       ro_modifier);
          return const0_rtx;
        }

      target = 0;
    }
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            p->expr->x_forced_labels
              = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
                                   p->expr->x_forced_labels);
          }
        else
          {
            if (modifier == EXPAND_INITIALIZER)
              forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                                 label_rtx (exp),
                                                 forced_labels);
          }

        temp = gen_rtx_MEM (FUNCTION_MODE,
                            gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          rtx value = DECL_RTL_IF_SET (exp);

          layout_decl (exp, 0);

          /* If the RTL was already set, update its mode and memory
             attributes.  */
          if (value != 0)
            {
              PUT_MODE (value, DECL_MODE (exp));
              SET_DECL_RTL (exp, 0);
              set_mem_attributes (value, exp, 1);
              SET_DECL_RTL (exp, value);
            }
        }

      /* Although static-storage variables start off initialized, according to
         ANSI C, a memcpy could overwrite them with uninitialized values.  So
         we check them too.  This also lets us check for read-only variables
         accessed via a non-const declaration, in case it won't be detected
         any other way (e.g., in an embedded system or OS kernel without
         memory protection).

         Aggregates are not checked here; they're handled elsewhere.  */
      if (cfun && current_function_check_memory_usage
          && code == VAR_DECL
          && GET_CODE (DECL_RTL (exp)) == MEM
          && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
        {
          enum memory_use_mode memory_usage;
          memory_usage = get_memory_usage_from_modifier (modifier);

          in_check_memory_usage = 1;
          if (memory_usage != MEMORY_USE_DONT)
            emit_library_call (chkr_check_addr_libfunc,
                               LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                               XEXP (DECL_RTL (exp), 0), Pmode,
                               GEN_INT (int_size_in_bytes (type)),
                               TYPE_MODE (sizetype),
                               GEN_INT (memory_usage),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }

      /* ... fall through ...  */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr
              = replace_equiv_address (addr,
                                       fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);

          temp = replace_equiv_address (DECL_RTL (exp), addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = replace_equiv_address (DECL_RTL (exp),
                                      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
            abort ();

          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
                          EXPAND_MEMORY_USE_BAD);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);
    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return replace_equiv_address (TREE_CST_RTL (exp),
                                      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        const char *saved_input_filename = input_filename;
        int saved_lineno = lineno;
        input_filename = EXPR_WFL_FILENAME (exp);
        lineno = EXPR_WFL_LINENO (exp);
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_filename, lineno);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
        input_filename = saved_input_filename;
        lineno = saved_lineno;
        return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
         initialization function and move it into our context.  */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          /* The following call just exists to abort if the context is
             not of a containing function.  */
          find_function_data (context);

          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return
            replace_equiv_address (temp,
                                   fix_lexical_addr (XEXP (temp, 0), exp));
        }

      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (build_qualified_type (type,
                                                      (TYPE_QUALS (type)
                                                       | TYPE_QUAL_CONST)),
                                3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
                                                save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          else
            store_expr (TREE_OPERAND (exp, 0), temp, 0);

          TREE_USED (exp) = 1;
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
      return temp;
    case PLACEHOLDER_EXPR:
      {
        tree old_list = placeholder_list;
        tree placeholder_expr = 0;

        exp = find_placeholder (exp, &placeholder_expr);

        if (exp != 0)
          {
            placeholder_list = TREE_CHAIN (placeholder_expr);
            temp = expand_expr (exp, original_target, tmode, ro_modifier);
            placeholder_list = old_list;
            return temp;
          }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
        expand_goto (TREE_OPERAND (exp, 0));
      else
        expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
        expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
        sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      return const0_rtx;

    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (2);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          insert_block (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (!DECL_RTL_SET_P (vars))
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
        {
          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
            abort ();
          emit_insns (RTL_EXPR_SEQUENCE (exp));
          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
        }
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;
          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
                        && (! MOVE_BY_PIECES_P
                            (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
                             TYPE_ALIGN (type)))
                        && ! mostly_zeros_p (exp))))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp, 1);

          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM)
            constructor = validize_mem (constructor);

          return constructor;
        }
      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL)
            target
              = assign_temp (build_qualified_type (type,
                                                   (TYPE_QUALS (type)
                                                    | (TREE_READONLY (exp)
                                                       * TYPE_QUAL_CONST))),
                             TREE_ADDRESSABLE (exp), 1, 1);

          store_constructor (exp, target, 0,
                             int_size_in_bytes (TREE_TYPE (exp)));
          return target;
        }
    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree index;
        tree string = string_constant (exp1, &index);

        /* Try to optimize reads from const strings.  */
        if (string
            && TREE_CODE (string) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1
            && modifier != EXPAND_MEMORY_USE_WO)
          return
            GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);

        if (cfun && current_function_check_memory_usage
            && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
          {
            enum memory_use_mode memory_usage;
            memory_usage = get_memory_usage_from_modifier (modifier);

            if (memory_usage != MEMORY_USE_DONT)
              {
                in_check_memory_usage = 1;
                emit_library_call (chkr_check_addr_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
                                   Pmode, GEN_INT (int_size_in_bytes (type)),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (memory_usage),
                                   TYPE_MODE (integer_type_node));
                in_check_memory_usage = 0;
              }
          }

        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);

        /* If we are writing to this object and its type is a record with
           readonly fields, we must mark it as readonly so it will
           conflict with readonly references to those fields.  */
        if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
          RTX_UNCHANGING_P (temp) = 1;

        return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = convert (sizetype, TREE_OPERAND (exp, 1));

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion,  (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

        if (! integer_zerop (low_bound))
          index = size_diffop (index, convert (sizetype, low_bound));

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
            && TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1)
          return
            GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
            && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST
            && 0 > compare_tree_int (index,
                                     list_length (CONSTRUCTOR_ELTS
                                                  (TREE_OPERAND (exp, 0)))))
          {
            tree elem;
            unsigned HOST_WIDE_INT i;

            for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
                 i = TREE_INT_CST_LOW (index);
                 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
              ;

            if (elem)
              return expand_expr (fold (TREE_VALUE (elem)), target,
                                  tmode, ro_modifier);
          }

        else if (optimize >= 1
                 && modifier != EXPAND_CONST_ADDRESS
                 && modifier != EXPAND_INITIALIZER
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem;

                    for (elem = CONSTRUCTOR_ELTS (init);
                         (elem
                          && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
                         elem = TREE_CHAIN (elem))
                      ;

                    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, ro_modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && 0 > compare_tree_int (index,
                                                  TREE_STRING_LENGTH (init)))
                  {
                    tree type = TREE_TYPE (TREE_TYPE (init));
                    enum machine_mode mode = TYPE_MODE (type);

                    if (GET_MODE_CLASS (mode) == MODE_INT
                        && GET_MODE_SIZE (mode) == 1)
                      return
                        GEN_INT (TREE_STRING_POINTER
                                 (init)[TREE_INT_CST_LOW (index)]);
                  }
              }
          }
      }
      /* Fall through.  */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  Don't do this if we have
         already written the data since we want to refer to that copy
         and varasm.c assumes that's what we'll do.  */
      if (code == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
          && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    HOST_WIDE_INT bitsize
                      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));

                    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (op0, op1, target);
                      }
                    else
                      {
                        enum machine_mode imode
                          = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
                        tree count
                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
                                         0);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep);
        rtx orig_op0;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to have to do.  This occurs in unchecked conversion in Ada.  */

        orig_op0 = op0
          = expand_expr (tem,
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                              != INTEGER_CST)
                          ? target : NULL_RTX),
                         VOIDmode,
                         (modifier == EXPAND_INITIALIZER
                          || modifier == EXPAND_CONST_ADDRESS)
                         ? modifier : EXPAND_NORMAL);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and OFFSET is 0 and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
                && offset == 0)
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

            /* If this object is in a register, put it into memory.
               This case can't occur in C, but can in Ada if we have
               unchecked conversion of an expression from a scalar type to
               an array or record type.  */
            if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
              {
                /* If the operand is a SAVE_EXPR, we can deal with this by
                   forcing the SAVE_EXPR into memory.  */
                if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
                  {
                    put_var_into_stack (TREE_OPERAND (exp, 0));
                    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
                  }
                else
                  {
                    tree nt
                      = build_qualified_type (TREE_TYPE (tem),
                                              (TYPE_QUALS (TREE_TYPE (tem))
                                               | TYPE_QUAL_CONST));
                    rtx memloc = assign_temp (nt, 1, 1, 1);

                    emit_move_insn (memloc, op0);
                    op0 = memloc;
                  }
              }

            if (GET_CODE (op0) != MEM)
              abort ();

            if (GET_MODE (offset_rtx) != ptr_mode)
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
            if (GET_MODE (offset_rtx) != Pmode)
              offset_rtx = convert_memory_address (Pmode, offset_rtx);
#endif

            /* A constant address in OP0 can have VOIDmode, we must not try
               to call force_reg for that case.  Avoid that case.  */
            if (GET_CODE (op0) == MEM
                && GET_MODE (op0) == BLKmode
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
                && bitsize != 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
              {
                rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

                if (GET_CODE (XEXP (temp, 0)) == REG)
                  op0 = temp;
                else
                  op0 = (replace_equiv_address
                         (op0,
                          force_reg (GET_MODE (XEXP (temp, 0)),
                                     XEXP (temp, 0))));
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }

        /* Check the access.  */
        if (cfun != 0 && current_function_check_memory_usage
            && GET_CODE (op0) == MEM)
          {
            enum memory_use_mode memory_usage;
            memory_usage = get_memory_usage_from_modifier (modifier);

            if (memory_usage != MEMORY_USE_DONT)
              {
                rtx to;
                int size;

                to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
                size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

                /* Check the access right of the pointer.  */
                in_check_memory_usage = 1;
                if (size > BITS_PER_UNIT)
                  emit_library_call (chkr_check_addr_libfunc,
                                     LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
                                     Pmode, GEN_INT (size / BITS_PER_UNIT),
                                     TYPE_MODE (sizetype),
                                     GEN_INT (memory_usage),
                                     TYPE_MODE (integer_type_node));
                in_check_memory_usage = 0;
              }
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
                && ((TYPE_ALIGN (TREE_TYPE (tem))
                     < GET_MODE_ALIGNMENT (mode))
                    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (bitsize >= 0
                && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
                    == INTEGER_CST)
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                          bitsize)))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && GET_CODE (op0) == MEM
                      && GET_CODE (target) == MEM
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                if (GET_CODE (op0) != MEM
                    || (target != 0 && GET_CODE (target) != MEM)
                    || bitpos % BITS_PER_UNIT != 0)
                  abort ();

                op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                emit_block_move (target, op0,
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT));

                return target;
              }

            op0 = validize_mem (op0);

            if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            op0 = extract_bit_field (op0, bitsize, bitpos,
                                     unsignedp, target, ext_mode, ext_mode,
                                     int_size_in_bytes (TREE_TYPE (tem)));

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);

            if (mode == BLKmode)
              {
                rtx new = assign_temp (build_qualified_type
                                       (type_for_mode (ext_mode, 0),
                                        TYPE_QUAL_CONST), 0, 1, 1);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                set_mem_attributes (op0, exp, 1);
              }

            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        set_mem_attributes (op0, exp, 0);
        if (GET_CODE (XEXP (op0, 0)) == REG)
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
    case VTABLE_REF:
      {
        rtx insn, before = get_last_insn (), vtbl_ref;

        /* Evaluate the interior expression.  */
        subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
                                 tmode, modifier);

        /* Get or create an instruction off which to hang a note.  */
        if (REG_P (subtarget))
          {
            target = subtarget;
            insn = get_last_insn ();
            if (insn == before)
              abort ();
            if (! INSN_P (insn))
              insn = prev_nonnote_insn (insn);
          }
        else
          {
            target = gen_reg_rtx (GET_MODE (subtarget));
            insn = emit_move_insn (target, subtarget);
          }

        /* Collect the data for the note.  */
        vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
        vtbl_ref = plus_constant (vtbl_ref,
                                  tree_low_cst (TREE_OPERAND (exp, 2), 0));
        /* Discard the initial CONST that was added.  */
        vtbl_ref = XEXP (vtbl_ref, 0);

        REG_NOTES (insn)
          = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

        return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low%bits_per_word);
               the_word  = set [ (index - rlo)/bits_per_word ];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */
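
        /* Illustrative sketch (not part of the original source): the same
           membership test written as plain C over a byte-addressed set,
           assuming hypothetical variables `setp', `set_low' and `index'
           and taking bits_per_word == BITS_PER_UNIT, as the expansion
           below does:

               unsigned char *setp = ...;
               int rlo = set_low - (set_low % BITS_PER_UNIT);
               unsigned char the_word = setp[(index - rlo) / BITS_PER_UNIT];
               int bit_index = index % BITS_PER_UNIT;
               unsigned char bitmask = 1 << bit_index;
               return (the_word & bitmask) != 0;

           The RTL generated below performs exactly this computation with
           expand_binop, expand_divmod and expand_shift.  */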
        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;

        /* If domain is empty, answer is no.  Likewise if index is constant
           and out of bounds.  */
        if (((TREE_CODE (set_high_bound) == INTEGER_CST
             && TREE_CODE (set_low_bound) == INTEGER_CST
             && tree_int_cst_lt (set_high_bound, set_low_bound))
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, op1);

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, op1);

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx_MEM (byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }
    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
        {
          WITH_CLEANUP_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
          expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 1) = 0;
        }
      return WITH_CLEANUP_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (2);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return (*lang_expand_expr) (exp, original_target, tmode, modifier);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attribute.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, exp, 0);
              return result;
            }

          if (target == 0)
            target = assign_temp (type, 0, 1, 1);

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        adjust_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target,
                         MIN ((int_size_in_bytes (TREE_TYPE
                                                  (TREE_OPERAND (exp, 0)))
                               * BITS_PER_UNIT),
                              (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                         0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, type, 0);
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             ro_modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        return
          convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                         op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, ro_modifier);

      /* If the input and output modes are both the same, we are done.
         Otherwise, if neither mode is BLKmode and both are within a word, we
         can use gen_lowpart.  If neither is true, make sure the operand is
         in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
        ;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
               && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
               && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
        op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

          if (TREE_ADDRESSABLE (exp))
            abort ();

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* At this point, OP0 is in the correct mode.  If the output type is such
         that the operand is known to be aligned, indicate that it is.
         Otherwise, we need only be concerned about alignment for non-BLKmode
         results.  */
      if (GET_CODE (op0) == MEM)
        {
          op0 = copy_rtx (op0);

          if (TYPE_ALIGN_OK (type))
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              HOST_WIDE_INT temp_size = MAX (int_size_in_bytes (inner_type),
                                             GET_MODE_SIZE (TYPE_MODE (type)));
              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
                                                    temp_size, 0, type);
              rtx new_with_op0_mode = copy_rtx (new);

              if (TREE_ADDRESSABLE (exp))
                abort ();

              PUT_MODE (new_with_op0_mode, GET_MODE (op0));
              if (GET_MODE (op0) == BLKmode)
                emit_block_move (new_with_op0_mode, op0,
                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
              else
                emit_move_insn (new_with_op0_mode, op0);

              op0 = new;
            }

          PUT_MODE (op0, TYPE_MODE (type));
        }

      return op0;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    plus_expr:
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                                 VOIDmode, ro_modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                 VOIDmode, ro_modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx_MINUS (mode, op0, op1);
        }
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          tree negated = fold (build1 (NEGATE_EXPR, type,
                                       TREE_OPERAND (exp, 1)));

          if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
            /* If we can't negate the constant in TYPE, leave it alone and
               expand_binop will negate it for us.  We used to try to do it
               here in the signed version of TYPE, but that doesn't work
               on POINTER_TYPEs.  */;
          else
            {
              exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
              goto plus_expr;
            }
        }
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS(mode) == MODE_INT)
                   ? subv_optab : sub_optab;
      goto binop;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          /* Apply distributive law if OP0 is x+c.  */
          if (GET_CODE (op0) == PLUS
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            return
              gen_rtx_PLUS
                (mode,
                 gen_rtx_MULT
                 (mode, XEXP (op0, 0),
                  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
                 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
                          * INTVAL (XEXP (op0, 1))));

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return
            gen_rtx_MULT (mode, op0,
                          GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
        }

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                               ? smul_widen_optab : umul_widen_optab);
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  goto binop2;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem;
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_expr (TREE_OPERAND (exp, 1),
                                                      NULL_RTX, VOIDmode, 0),
                                         unsignedp);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  htem = expand_mult_highpart_adjust (innermode,
                                                      gen_highpart (innermode, temp),
                                                      op0, op1,
                                                      gen_highpart (innermode, temp),
                                                      unsignedp);
                  emit_move_insn (gen_highpart (innermode, temp), htem);
                  return temp;
                }
            }
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
         expensive divide.  If not, combine will rebuild the original
         computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
          && !real_onep (TREE_OPERAND (exp, 0)))
        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
                                   build (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          TREE_OPERAND (exp, 1))),
                            target, tmode, unsignedp);
      this_optab = sdiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
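      /* Note: the first argument to expand_divmod selects the result
         wanted -- 0 in the division cases above asks for the quotient,
         1 here asks for the remainder.  */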
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
                          ! unsignedp && flag_trapv
                          && (GET_MODE_CLASS (mode) == MODE_INT)
                          ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (GET_CODE (target) == MEM)
        target = gen_reg_rtx (mode);

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL_RTX, op0);
        }
      else
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
          do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
                                   unsignedp, mode, NULL_RTX, NULL_RTX,
                                   op0);
        }
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
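      /* In source terms, the compare-and-jump fallback above turns
         "r = MIN (a, b)" (with no smin/umin instruction available) into
         roughly

             r = a;
             if (r <= b) goto done;
             r = b;
           done:

         with the comparison sense flipped for MAX and for unsigned types.
         The names here are illustrative only.  */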
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */
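      /* In source terms: the C expression "a && b" is normally a
         TRUTH_ANDIF_EXPR (short-circuit; b is not evaluated when a is 0),
         while a front end may emit TRUTH_AND_EXPR when both operands are
         cheap and free of side effects, in which case it is expanded like
         the bitwise AND below on zero-or-one values.  */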
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
8158 case UNORDERED_EXPR
:
8165 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
8169 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8170 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8172 && GET_CODE (original_target
) == REG
8173 && (GET_MODE (original_target
)
8174 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8176 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8179 if (temp
!= original_target
)
8180 temp
= copy_to_reg (temp
);
8182 op1
= gen_label_rtx ();
8183 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8184 GET_MODE (temp
), unsignedp
, op1
);
8185 emit_move_insn (temp
, const1_rtx
);
8190 /* If no set-flag instruction, must generate a conditional
8191 store into a temporary variable. Drop through
8192 and handle this like && and ||. */
8194 case TRUTH_ANDIF_EXPR
:
8195 case TRUTH_ORIF_EXPR
:
8197 && (target
== 0 || ! safe_from_p (target
, exp
, 1)
8198 /* Make sure we don't have a hard reg (such as function's return
8199 value) live across basic blocks, if not optimizing. */
8200 || (!optimize
&& GET_CODE (target
) == REG
8201 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8202 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8205 emit_clr_insn (target
);
8207 op1
= gen_label_rtx ();
8208 jumpifnot (exp
, op1
);
8211 emit_0_to_1_insn (target
);
8214 return ignore
? const0_rtx
: target
;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;
8227 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8229 return expand_expr (TREE_OPERAND (exp
, 1),
8230 (ignore
? const0_rtx
: target
),
8234 /* If we would have a "singleton" (see below) were it not for a
8235 conversion in each arm, bring that conversion back out. */
8236 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8237 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8238 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8239 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8241 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8242 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8244 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8245 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8246 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8247 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8248 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8249 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8250 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8251 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8252 return expand_expr (build1 (NOP_EXPR
, type
,
8253 build (COND_EXPR
, TREE_TYPE (iftrue
),
8254 TREE_OPERAND (exp
, 0),
8256 target
, tmode
, modifier
);
8260 /* Note that COND_EXPRs whose type is a structure or union
8261 are required to be constructed to contain assignments of
8262 a temporary variable, so that we can evaluate them here
8263 for side effect only. If type is void, we must do likewise. */
8265 /* If an arm of the branch requires a cleanup,
8266 only that cleanup is performed. */
8269 tree binary_op
= 0, unary_op
= 0;
8271 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8272 convert it to our mode, if necessary. */
8273 if (integer_onep (TREE_OPERAND (exp
, 1))
8274 && integer_zerop (TREE_OPERAND (exp
, 2))
8275 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8279 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8284 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, ro_modifier
);
8285 if (GET_MODE (op0
) == mode
)
8289 target
= gen_reg_rtx (mode
);
8290 convert_move (target
, op0
, unsignedp
);
8294 /* Check for X ? A + B : A. If we have this, we can copy A to the
8295 output and conditionally add B. Similarly for unary operations.
8296 Don't do this if X has side-effects because those side effects
8297 might affect A or B and the "?" operation is a sequence point in
8298 ANSI. (operand_equal_p tests for side effects.) */
8300 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8301 && operand_equal_p (TREE_OPERAND (exp
, 2),
8302 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8303 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8304 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8305 && operand_equal_p (TREE_OPERAND (exp
, 1),
8306 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8307 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8308 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8309 && operand_equal_p (TREE_OPERAND (exp
, 2),
8310 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8311 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8312 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8313 && operand_equal_p (TREE_OPERAND (exp
, 1),
8314 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8315 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8317 /* If we are not to produce a result, we have no target. Otherwise,
8318 if a target was specified use it; it will not be used as an
8319 intermediate target unless it is safe. If no target, use a
8324 else if (original_target
8325 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8326 || (singleton
&& GET_CODE (original_target
) == REG
8327 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8328 && original_target
== var_rtx (singleton
)))
8329 && GET_MODE (original_target
) == mode
8330 #ifdef HAVE_conditional_move
8331 && (! can_conditionally_move_p (mode
)
8332 || GET_CODE (original_target
) == REG
8333 || TREE_ADDRESSABLE (type
))
8335 && (GET_CODE (original_target
) != MEM
8336 || TREE_ADDRESSABLE (type
)))
8337 temp
= original_target
;
8338 else if (TREE_ADDRESSABLE (type
))
8341 temp
= assign_temp (type
, 0, 0, 1);
8343 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8344 do the test of X as a store-flag operation, do this as
8345 A + ((X != 0) << log C). Similarly for other simple binary
8346 operators. Only do for C == 1 if BRANCH_COST is low. */
8347 if (temp
&& singleton
&& binary_op
8348 && (TREE_CODE (binary_op
) == PLUS_EXPR
8349 || TREE_CODE (binary_op
) == MINUS_EXPR
8350 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8351 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8352 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8353 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8354 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8357 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8358 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8359 ? addv_optab
: add_optab
)
8360 : TREE_CODE (binary_op
) == MINUS_EXPR
8361 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8362 ? subv_optab
: sub_optab
)
8363 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8366 /* If we had X ? A : A + 1, do this as A + (X == 0).
8368 We have to invert the truth value here and then put it
8369 back later if do_store_flag fails. We cannot simply copy
8370 TREE_OPERAND (exp, 0) to another variable and modify that
8371 because invert_truthvalue can modify the tree pointed to
8373 if (singleton
== TREE_OPERAND (exp
, 1))
8374 TREE_OPERAND (exp
, 0)
8375 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8377 result
= do_store_flag (TREE_OPERAND (exp
, 0),
8378 (safe_from_p (temp
, singleton
, 1)
8380 mode
, BRANCH_COST
<= 1);
8382 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8383 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8384 build_int_2 (tree_log2
8388 (safe_from_p (temp
, singleton
, 1)
8389 ? temp
: NULL_RTX
), 0);
8393 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8394 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8395 unsignedp
, OPTAB_LIB_WIDEN
);
8397 else if (singleton
== TREE_OPERAND (exp
, 1))
8398 TREE_OPERAND (exp
, 0)
8399 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8402 do_pending_stack_adjust ();
8404 op0
= gen_label_rtx ();
8406 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8410 /* If the target conflicts with the other operand of the
8411 binary op, we can't use it. Also, we can't use the target
8412 if it is a hard register, because evaluating the condition
8413 might clobber it. */
8415 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8416 || (GET_CODE (temp
) == REG
8417 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8418 temp
= gen_reg_rtx (mode
);
8419 store_expr (singleton
, temp
, 0);
8422 expand_expr (singleton
,
8423 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8424 if (singleton
== TREE_OPERAND (exp
, 1))
8425 jumpif (TREE_OPERAND (exp
, 0), op0
);
8427 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8429 start_cleanup_deferral ();
8430 if (binary_op
&& temp
== 0)
8431 /* Just touch the other operand. */
8432 expand_expr (TREE_OPERAND (binary_op
, 1),
8433 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8435 store_expr (build (TREE_CODE (binary_op
), type
,
8436 make_tree (type
, temp
),
8437 TREE_OPERAND (binary_op
, 1)),
8440 store_expr (build1 (TREE_CODE (unary_op
), type
,
8441 make_tree (type
, temp
)),
8445 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8446 comparison operator. If we have one of these cases, set the
8447 output to A, branch on A (cse will merge these two references),
8448 then set the output to FOO. */
8450 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8451 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8452 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8453 TREE_OPERAND (exp
, 1), 0)
8454 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8455 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8456 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8458 if (GET_CODE (temp
) == REG
8459 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8460 temp
= gen_reg_rtx (mode
);
8461 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8462 jumpif (TREE_OPERAND (exp
, 0), op0
);
8464 start_cleanup_deferral ();
8465 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8469 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8470 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8471 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8472 TREE_OPERAND (exp
, 2), 0)
8473 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8474 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8475 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8477 if (GET_CODE (temp
) == REG
8478 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8479 temp
= gen_reg_rtx (mode
);
8480 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8481 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8483 start_cleanup_deferral ();
8484 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8489 op1
= gen_label_rtx ();
8490 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8492 start_cleanup_deferral ();
8494 /* One branch of the cond can be void, if it never returns. For
8495 example A ? throw : E */
8497 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8498 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8500 expand_expr (TREE_OPERAND (exp
, 1),
8501 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8502 end_cleanup_deferral ();
8504 emit_jump_insn (gen_jump (op1
));
8507 start_cleanup_deferral ();
8509 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8510 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8512 expand_expr (TREE_OPERAND (exp
, 2),
8513 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8516 end_cleanup_deferral ();
8527 /* Something needs to be initialized, but we didn't know
8528 where that thing was when building the tree. For example,
8529 it could be the return value of a function, or a parameter
8530 to a function which lays down in the stack, or a temporary
8531 variable which must be passed by reference.
8533 We guarantee that the expression will either be constructed
8534 or copied into our original target. */
8536 tree slot
= TREE_OPERAND (exp
, 0);
8537 tree cleanups
= NULL_TREE
;
8540 if (TREE_CODE (slot
) != VAR_DECL
)
8544 target
= original_target
;
8546 /* Set this here so that if we get a target that refers to a
8547 register variable that's already been used, put_reg_into_stack
8548 knows that it should fix up those uses. */
8549 TREE_USED (slot
) = 1;
8553 if (DECL_RTL_SET_P (slot
))
8555 target
= DECL_RTL (slot
);
	      /* If we have already expanded the slot, don't do it again.  */
8558 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8563 target
= assign_temp (type
, 2, 0, 1);
8564 /* All temp slots at this level must not conflict. */
8565 preserve_temp_slots (target
);
8566 SET_DECL_RTL (slot
, target
);
8567 if (TREE_ADDRESSABLE (slot
))
8568 put_var_into_stack (slot
);
8570 /* Since SLOT is not known to the called function
8571 to belong to its stack frame, we must build an explicit
8572 cleanup. This case occurs when we must build up a reference
8573 to pass the reference as an argument. In this case,
8574 it is very likely that such a reference need not be
8577 if (TREE_OPERAND (exp
, 2) == 0)
8578 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
8579 cleanups
= TREE_OPERAND (exp
, 2);
8584 /* This case does occur, when expanding a parameter which
8585 needs to be constructed on the stack. The target
8586 is the actual stack address that we want to initialize.
8587 The function we call will perform the cleanup in this case. */
8589 /* If we have already assigned it space, use that space,
8590 not target that we were passed in, as our target
8591 parameter is only a hint. */
8592 if (DECL_RTL_SET_P (slot
))
8594 target
= DECL_RTL (slot
);
	      /* If we have already expanded the slot, don't do it again.  */
8597 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8602 SET_DECL_RTL (slot
, target
);
8603 /* If we must have an addressable slot, then make sure that
8604 the RTL that we just stored in slot is OK. */
8605 if (TREE_ADDRESSABLE (slot
))
8606 put_var_into_stack (slot
);
8610 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8611 /* Mark it as expanded. */
8612 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8614 store_expr (exp1
, target
, 0);
8616 expand_decl_cleanup (NULL_TREE
, cleanups
);
8623 tree lhs
= TREE_OPERAND (exp
, 0);
8624 tree rhs
= TREE_OPERAND (exp
, 1);
8626 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8632 /* If lhs is complex, expand calls in rhs before computing it.
8633 That's so we don't compute a pointer and save it over a
8634 call. If lhs is simple, compute it first so we can give it
8635 as a target if the rhs is just a call. This avoids an
8636 extra temp and copy and that prevents a partial-subsumption
8637 which makes bad code. Actually we could treat
8638 component_ref's of vars like vars. */
8640 tree lhs
= TREE_OPERAND (exp
, 0);
8641 tree rhs
= TREE_OPERAND (exp
, 1);
8645 /* Check for |= or &= of a bitfield of size one into another bitfield
8646 of size 1. In this case, (unless we need the result of the
8647 assignment) we can do this more efficiently with a
8648 test followed by an assignment, if necessary.
8650 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8651 things change so we do, this code should be enhanced to
8654 && TREE_CODE (lhs
) == COMPONENT_REF
8655 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8656 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8657 && TREE_OPERAND (rhs
, 0) == lhs
8658 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8659 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8660 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8662 rtx label
= gen_label_rtx ();
8664 do_jump (TREE_OPERAND (rhs
, 1),
8665 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8666 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8667 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8668 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8670 : integer_zero_node
)),
8672 do_pending_stack_adjust ();
8677 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8683 if (!TREE_OPERAND (exp
, 0))
8684 expand_null_return ();
8686 expand_return (TREE_OPERAND (exp
, 0));
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
8699 /* Are we taking the address of a nested function? */
8700 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8701 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8702 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8703 && ! TREE_STATIC (exp
))
8705 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8706 op0
= force_operand (op0
, target
);
8708 /* If we are taking the address of something erroneous, just
8710 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8712 /* If we are taking the address of a constant and are at the
8713 top level, we have to use output_constant_def since we can't
8714 call force_const_mem at top level. */
8716 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8717 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8719 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8722 /* We make sure to pass const0_rtx down if we came in with
8723 ignore set, to avoid doing the cleanups twice for something. */
8724 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8725 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8726 (modifier
== EXPAND_INITIALIZER
8727 ? modifier
: EXPAND_CONST_ADDRESS
));
8729 /* If we are going to ignore the result, OP0 will have been set
8730 to const0_rtx, so just return it. Don't get confused and
8731 think we are taking the address of the constant. */
8735 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8736 clever and returns a REG when given a MEM. */
8737 op0
= protect_from_queue (op0
, 1);
8739 /* We would like the object in memory. If it is a constant, we can
8740 have it be statically allocated into memory. For a non-constant,
8741 we need to allocate some memory and store the value into it. */
8743 if (CONSTANT_P (op0
))
8744 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8746 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8747 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8748 || GET_CODE (op0
) == PARALLEL
)
	  /* If this object is in a register, it can't be BLKmode.  */
8751 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8752 tree nt
= build_qualified_type (inner_type
,
8753 (TYPE_QUALS (inner_type
)
8754 | TYPE_QUAL_CONST
));
8755 rtx memloc
= assign_temp (nt
, 1, 1, 1);
8757 if (GET_CODE (op0
) == PARALLEL
)
8758 /* Handle calls that pass values in multiple non-contiguous
8759 locations. The Irix 6 ABI has examples of this. */
8760 emit_group_store (memloc
, op0
, int_size_in_bytes (inner_type
));
8762 emit_move_insn (memloc
, op0
);
8767 if (GET_CODE (op0
) != MEM
)
8770 mark_temp_addr_taken (op0
);
8771 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8773 op0
= XEXP (op0
, 0);
8774 #ifdef POINTERS_EXTEND_UNSIGNED
8775 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8776 && mode
== ptr_mode
)
8777 op0
= convert_memory_address (ptr_mode
, op0
);
8782 /* If OP0 is not aligned as least as much as the type requires, we
8783 need to make a temporary, copy OP0 to it, and take the address of
8784 the temporary. We want to use the alignment of the type, not of
8785 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8786 the test for BLKmode means that can't happen. The test for
8787 BLKmode is because we never make mis-aligned MEMs with
8790 We don't need to do this at all if the machine doesn't have
8791 strict alignment. */
8792 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
8793 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
8795 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
8797 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8799 = assign_stack_temp_for_type
8800 (TYPE_MODE (inner_type
),
8801 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
8802 : int_size_in_bytes (inner_type
),
8803 1, build_qualified_type (inner_type
,
8804 (TYPE_QUALS (inner_type
)
8805 | TYPE_QUAL_CONST
)));
8807 if (TYPE_ALIGN_OK (inner_type
))
8810 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)));
8814 op0
= force_operand (XEXP (op0
, 0), target
);
8817 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
8818 op0
= force_reg (Pmode
, op0
);
8820 if (GET_CODE (op0
) == REG
8821 && ! REG_USERVAR_P (op0
))
8822 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8824 #ifdef POINTERS_EXTEND_UNSIGNED
8825 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8826 && mode
== ptr_mode
)
8827 op0
= convert_memory_address (ptr_mode
, op0
);
8832 case ENTRY_VALUE_EXPR
:
8835 /* COMPLEX type for Extended Pascal & Fortran */
8838 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8841 /* Get the rtx code of the operands. */
8842 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8843 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8846 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8850 /* Move the real (op0) and imaginary (op1) parts to their location. */
8851 emit_move_insn (gen_realpart (mode
, target
), op0
);
8852 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8854 insns
= get_insns ();
8857 /* Complex construction should appear as a single unit. */
8858 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8859 each with a separate pseudo as destination.
8860 It's not correct for flow to treat them as a unit. */
8861 if (GET_CODE (target
) != CONCAT
)
8862 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8870 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8871 return gen_realpart (mode
, op0
);
8874 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8875 return gen_imagpart (mode
, op0
);
8879 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8883 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8886 target
= gen_reg_rtx (mode
);
8890 /* Store the realpart and the negated imagpart to target. */
8891 emit_move_insn (gen_realpart (partmode
, target
),
8892 gen_realpart (partmode
, op0
));
8894 imag_t
= gen_imagpart (partmode
, target
);
8895 temp
= expand_unop (partmode
,
8896 ! unsignedp
&& flag_trapv
8897 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8898 ? negv_optab
: neg_optab
,
8899 gen_imagpart (partmode
, op0
), imag_t
, 0);
8901 emit_move_insn (imag_t
, temp
);
8903 insns
= get_insns ();
8906 /* Conjugate should appear as a single unit
8907 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8908 each with a separate pseudo as destination.
8909 It's not correct for flow to treat them as a unit. */
8910 if (GET_CODE (target
) != CONCAT
)
8911 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8918 case TRY_CATCH_EXPR
:
8920 tree handler
= TREE_OPERAND (exp
, 1);
8922 expand_eh_region_start ();
8924 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8926 expand_eh_region_end_cleanup (handler
);
8931 case TRY_FINALLY_EXPR
:
8933 tree try_block
= TREE_OPERAND (exp
, 0);
8934 tree finally_block
= TREE_OPERAND (exp
, 1);
8935 rtx finally_label
= gen_label_rtx ();
8936 rtx done_label
= gen_label_rtx ();
8937 rtx return_link
= gen_reg_rtx (Pmode
);
8938 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8939 (tree
) finally_label
, (tree
) return_link
);
8940 TREE_SIDE_EFFECTS (cleanup
) = 1;
8942 /* Start a new binding layer that will keep track of all cleanup
8943 actions to be performed. */
8944 expand_start_bindings (2);
8946 target_temp_slot_level
= temp_slot_level
;
8948 expand_decl_cleanup (NULL_TREE
, cleanup
);
8949 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8951 preserve_temp_slots (op0
);
8952 expand_end_bindings (NULL_TREE
, 0, 0);
8953 emit_jump (done_label
);
8954 emit_label (finally_label
);
8955 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8956 emit_indirect_jump (return_link
);
8957 emit_label (done_label
);
8961 case GOTO_SUBROUTINE_EXPR
:
8963 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8964 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8965 rtx return_address
= gen_label_rtx ();
8966 emit_move_insn (return_link
,
8967 gen_rtx_LABEL_REF (Pmode
, return_address
));
8969 emit_label (return_address
);
8974 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8977 return get_exception_pointer (cfun
);
8980 /* Function descriptors are not valid except for as
8981 initialization constants, and should not be expanded. */
8985 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
8988 /* Here to do an ordinary binary operator, generating an instruction
8989 from the optab already placed in `this_optab'. */
8991 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8993 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8994 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8996 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
8997 unsignedp
, OPTAB_LIB_WIDEN
);
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg1);
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg0);
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
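/* A sketch of how string_constant is typically used by a caller (the
   variable names and surrounding code are illustrative, not taken from
   this file):

	tree offset;
	tree str = string_constant (arg, &offset);

	if (str != 0 && host_integerp (offset, 1))
	  {
	    const char *p
	      = TREE_STRING_POINTER (str) + tree_low_cst (offset, 1);
	    ...
	  }

   i.e. a caller gets back the STRING_CST node plus a byte offset into
   its contents.  */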
9046 /* Expand code for a post- or pre- increment or decrement
9047 and return the RTX for the result.
9048 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9051 expand_increment (exp
, post
, ignore
)
9057 tree incremented
= TREE_OPERAND (exp
, 0);
9058 optab this_optab
= add_optab
;
9060 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9061 int op0_is_copy
= 0;
9062 int single_insn
= 0;
9063 /* 1 means we can't store into OP0 directly,
9064 because it is a subreg narrower than a word,
9065 and we don't dare clobber the rest of the word. */
9068 /* Stabilize any component ref that might need to be
9069 evaluated more than once below. */
9071 || TREE_CODE (incremented
) == BIT_FIELD_REF
9072 || (TREE_CODE (incremented
) == COMPONENT_REF
9073 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9074 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9075 incremented
= stabilize_reference (incremented
);
9076 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9077 ones into save exprs so that they don't accidentally get evaluated
9078 more than once by the code below. */
9079 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9080 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9081 incremented
= save_expr (incremented
);
9083 /* Compute the operands as RTX.
9084 Note whether OP0 is the actual lvalue or a copy of it:
9085 I believe it is a copy iff it is a register or subreg
9086 and insns were generated in computing it. */
9088 temp
= get_last_insn ();
9089 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_RW
);
9091 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9092 in place but instead must do sign- or zero-extension during assignment,
9093 so we copy it into a new register and let the code below use it as
     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
9099 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9102 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9106 else if (GET_CODE (op0
) == SUBREG
9107 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9109 /* We cannot increment this SUBREG in place. If we are
9110 post-incrementing, get a copy of the old value. Otherwise,
9111 just mark that we cannot increment in place. */
9113 op0
= copy_to_reg (op0
);
9118 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9119 && temp
!= get_last_insn ());
9120 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
9121 EXPAND_MEMORY_USE_BAD
);
9123 /* Decide whether incrementing or decrementing. */
9124 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9125 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9126 this_optab
= sub_optab
;
9128 /* Convert decrement by a constant into a negative increment. */
9129 if (this_optab
== sub_optab
9130 && GET_CODE (op1
) == CONST_INT
)
9132 op1
= GEN_INT (-INTVAL (op1
));
9133 this_optab
= add_optab
;
9136 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9137 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9139 /* For a preincrement, see if we can do this with a single instruction. */
9142 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9143 if (icode
!= (int) CODE_FOR_nothing
9144 /* Make sure that OP0 is valid for operands 0 and 1
9145 of the insn we want to queue. */
9146 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9147 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9148 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9152 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9153 then we cannot just increment OP0. We must therefore contrive to
9154 increment the original value. Then, for postincrement, we can return
9155 OP0 since it is a copy of the old value. For preincrement, expand here
9156 unless we can do it with a single insn.
9158 Likewise if storing directly into OP0 would clobber high bits
9159 we need to preserve (bad_subreg). */
9160 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9162 /* This is the easiest way to increment the value wherever it is.
9163 Problems with multiple evaluation of INCREMENTED are prevented
9164 because either (1) it is a component_ref or preincrement,
9165 in which case it was stabilized above, or (2) it is an array_ref
9166 with constant index in an array in a register, which is
9167 safe to reevaluate. */
9168 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9169 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9170 ? MINUS_EXPR
: PLUS_EXPR
),
9173 TREE_OPERAND (exp
, 1));
9175 while (TREE_CODE (incremented
) == NOP_EXPR
9176 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9178 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9179 incremented
= TREE_OPERAND (incremented
, 0);
9182 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9183 return post
? op0
: temp
;
9188 /* We have a true reference to the value in OP0.
9189 If there is an insn to add or subtract in this mode, queue it.
9190 Queueing the increment insn avoids the register shuffling
9191 that often results if we must increment now and first save
9192 the old value for subsequent use. */
9194 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9195 op0
= stabilize (op0
);
9198 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9199 if (icode
!= (int) CODE_FOR_nothing
9200 /* Make sure that OP0 is valid for operands 0 and 1
9201 of the insn we want to queue. */
9202 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9203 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9205 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9206 op1
= force_reg (mode
, op1
);
9208 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9210 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9212 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9213 ? force_reg (Pmode
, XEXP (op0
, 0))
9214 : copy_to_reg (XEXP (op0
, 0)));
9217 op0
= replace_equiv_address (op0
, addr
);
9218 temp
= force_reg (GET_MODE (op0
), op0
);
9219 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9220 op1
= force_reg (mode
, op1
);
9222 /* The increment queue is LIFO, thus we have to `queue'
9223 the instructions in reverse order. */
9224 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9225 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9230 /* Preincrement, or we can't increment with one simple insn. */
9232 /* Save a copy of the value before inc or dec, to return it later. */
9233 temp
= value
= copy_to_reg (op0
);
9235 /* Arrange to return the incremented value. */
9236 /* Copy the rtx because expand_binop will protect from the queue,
9237 and the results of that would be invalid for us to return
9238 if our caller does emit_queue before using our result. */
9239 temp
= copy_rtx (value
= op0
);
9241 /* Increment however we can. */
9242 op1
= expand_binop (mode
, this_optab
, value
, op1
,
9243 current_function_check_memory_usage
? NULL_RTX
: op0
,
9244 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9245 /* Make sure the value is stored into OP0. */
9247 emit_move_insn (op0
, op1
);
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
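/* As an illustration: for a sequence of calls such as

	f (1); g (2); h (3);

   the argument pops after each call are not emitted immediately; they
   accumulate in pending_stack_adjust, and do_pending_stack_adjust later
   emits a single adjust_stack at the next point where the stack pointer
   has to be exact (before a jump, a label, or the end of the function).  */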
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif
9359 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9365 /* This is not true with #pragma weak */
9367 /* The address of something can never be zero. */
9369 emit_jump (if_true_label
);
9374 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9375 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9376 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
9377 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
9380 /* If we are narrowing the operand, we have to do the compare in the
9382 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9383 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9385 case NON_LVALUE_EXPR
:
9386 case REFERENCE_EXPR
:
9391 /* These cannot change zero->non-zero or vice versa. */
9392 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9395 case WITH_RECORD_EXPR
:
9396 /* Put the object on the placeholder list, recurse through our first
9397 operand, and pop the list. */
9398 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9400 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9401 placeholder_list
= TREE_CHAIN (placeholder_list
);
9405 /* This is never less insns than evaluating the PLUS_EXPR followed by
9406 a test and can be longer if the test is eliminated. */
9408 /* Reduce to minus. */
9409 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9410 TREE_OPERAND (exp
, 0),
9411 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9412 TREE_OPERAND (exp
, 1))));
9413 /* Process as MINUS. */
9417 /* Non-zero iff operands of minus differ. */
9418 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9419 TREE_OPERAND (exp
, 0),
9420 TREE_OPERAND (exp
, 1)),
9421 NE
, NE
, if_false_label
, if_true_label
);
9425 /* If we are AND'ing with a small constant, do this comparison in the
9426 smallest type that fits. If the machine doesn't have comparisons
9427 that small, it will be converted back to the wider comparison.
9428 This helps if we are testing the sign bit of a narrower object.
9429 combine can't do this for us because it can't know whether a
9430 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9432 if (! SLOW_BYTE_ACCESS
9433 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9434 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9435 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9436 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9437 && (type
= type_for_mode (mode
, 1)) != 0
9438 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9439 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9440 != CODE_FOR_nothing
))
9442 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9447 case TRUTH_NOT_EXPR
:
9448 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9451 case TRUTH_ANDIF_EXPR
:
9452 if (if_false_label
== 0)
9453 if_false_label
= drop_through_label
= gen_label_rtx ();
9454 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9455 start_cleanup_deferral ();
9456 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9457 end_cleanup_deferral ();
9460 case TRUTH_ORIF_EXPR
:
9461 if (if_true_label
== 0)
9462 if_true_label
= drop_through_label
= gen_label_rtx ();
9463 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9464 start_cleanup_deferral ();
9465 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9466 end_cleanup_deferral ();
9471 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9472 preserve_temp_slots (NULL_RTX
);
9476 do_pending_stack_adjust ();
9477 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9483 case ARRAY_RANGE_REF
:
9485 HOST_WIDE_INT bitsize
, bitpos
;
9487 enum machine_mode mode
;
9492 /* Get description of this reference. We don't actually care
9493 about the underlying object here. */
9494 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9495 &unsignedp
, &volatilep
);
9497 type
= type_for_size (bitsize
, unsignedp
);
9498 if (! SLOW_BYTE_ACCESS
9499 && type
!= 0 && bitsize
>= 0
9500 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9501 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9502 != CODE_FOR_nothing
))
9504 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9511 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9512 if (integer_onep (TREE_OPERAND (exp
, 1))
9513 && integer_zerop (TREE_OPERAND (exp
, 2)))
9514 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9516 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9517 && integer_onep (TREE_OPERAND (exp
, 2)))
9518 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9522 rtx label1
= gen_label_rtx ();
9523 drop_through_label
= gen_label_rtx ();
9525 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9527 start_cleanup_deferral ();
9528 /* Now the THEN-expression. */
9529 do_jump (TREE_OPERAND (exp
, 1),
9530 if_false_label
? if_false_label
: drop_through_label
,
9531 if_true_label
? if_true_label
: drop_through_label
);
9532 /* In case the do_jump just above never jumps. */
9533 do_pending_stack_adjust ();
9534 emit_label (label1
);
9536 /* Now the ELSE-expression. */
9537 do_jump (TREE_OPERAND (exp
, 2),
9538 if_false_label
? if_false_label
: drop_through_label
,
9539 if_true_label
? if_true_label
: drop_through_label
);
9540 end_cleanup_deferral ();
9546 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9548 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9549 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9551 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9552 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9555 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9556 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9557 fold (build1 (REALPART_EXPR
,
9558 TREE_TYPE (inner_type
),
9560 fold (build1 (REALPART_EXPR
,
9561 TREE_TYPE (inner_type
),
9563 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9564 fold (build1 (IMAGPART_EXPR
,
9565 TREE_TYPE (inner_type
),
9567 fold (build1 (IMAGPART_EXPR
,
9568 TREE_TYPE (inner_type
),
9570 if_false_label
, if_true_label
);
9573 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9574 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9576 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9577 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9578 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9580 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9586 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9588 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9589 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9591 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9592 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9595 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9596 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9597 fold (build1 (REALPART_EXPR
,
9598 TREE_TYPE (inner_type
),
9600 fold (build1 (REALPART_EXPR
,
9601 TREE_TYPE (inner_type
),
9603 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9604 fold (build1 (IMAGPART_EXPR
,
9605 TREE_TYPE (inner_type
),
9607 fold (build1 (IMAGPART_EXPR
,
9608 TREE_TYPE (inner_type
),
9610 if_false_label
, if_true_label
);
9613 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9614 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9616 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9617 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9618 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9620 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
9625 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9626 if (GET_MODE_CLASS (mode
) == MODE_INT
9627 && ! can_compare_p (LT
, mode
, ccp_jump
))
9628 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9630 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
9634 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9635 if (GET_MODE_CLASS (mode
) == MODE_INT
9636 && ! can_compare_p (LE
, mode
, ccp_jump
))
9637 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9639 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9643 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9644 if (GET_MODE_CLASS (mode
) == MODE_INT
9645 && ! can_compare_p (GT
, mode
, ccp_jump
))
9646 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9648 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9652 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9653 if (GET_MODE_CLASS (mode
) == MODE_INT
9654 && ! can_compare_p (GE
, mode
, ccp_jump
))
9655 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9657 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9660 case UNORDERED_EXPR
:
9663 enum rtx_code cmp
, rcmp
;
9666 if (code
== UNORDERED_EXPR
)
9667 cmp
= UNORDERED
, rcmp
= ORDERED
;
9669 cmp
= ORDERED
, rcmp
= UNORDERED
;
9670 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9673 if (! can_compare_p (cmp
, mode
, ccp_jump
)
9674 && (can_compare_p (rcmp
, mode
, ccp_jump
)
9675 /* If the target doesn't provide either UNORDERED or ORDERED
9676 comparisons, canonicalize on UNORDERED for the library. */
9677 || rcmp
== UNORDERED
))
9681 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
9683 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
9688 enum rtx_code rcode1
;
9689 enum tree_code tcode2
;
9713 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9714 if (can_compare_p (rcode1
, mode
, ccp_jump
))
9715 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
9719 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
9720 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
9723 /* If the target doesn't support combined unordered
9724 compares, decompose into UNORDERED + comparison. */
9725 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
9726 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
9727 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
9728 do_jump (exp
, if_false_label
, if_true_label
);
9734 __builtin_expect (<test>, 0) and
9735 __builtin_expect (<test>, 1)
9737 We need to do this here, so that <test> is not converted to a SCC
9738 operation on machines that use condition code registers and COMPARE
9739 like the PowerPC, and then the jump is done based on whether the SCC
9740 operation produced a 1 or 0. */
9742 /* Check for a built-in function. */
9743 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
9745 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
9746 tree arglist
= TREE_OPERAND (exp
, 1);
9748 if (TREE_CODE (fndecl
) == FUNCTION_DECL
9749 && DECL_BUILT_IN (fndecl
)
9750 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
9751 && arglist
!= NULL_TREE
9752 && TREE_CHAIN (arglist
) != NULL_TREE
)
9754 rtx seq
= expand_builtin_expect_jump (exp
, if_false_label
,
9757 if (seq
!= NULL_RTX
)
9764 /* fall through and generate the normal code. */
9768 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
9770 /* This is not needed any more and causes poor code since it causes
9771 comparisons and tests from non-SI objects to have different code
9773 /* Copy to register to avoid generating bad insns by cse
9774 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9775 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9776 temp
= copy_to_reg (temp
);
9778 do_pending_stack_adjust ();
9779 /* Do any postincrements in the expression that was tested. */
9782 if (GET_CODE (temp
) == CONST_INT
9783 || (GET_CODE (temp
) == CONST_DOUBLE
&& GET_MODE (temp
) == VOIDmode
)
9784 || GET_CODE (temp
) == LABEL_REF
)
9786 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
9790 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
9791 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
9792 /* Note swapping the labels gives us not-equal. */
9793 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
9794 else if (GET_MODE (temp
) != VOIDmode
)
9795 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
9796 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9797 GET_MODE (temp
), NULL_RTX
,
9798 if_false_label
, if_true_label
);
9803 if (drop_through_label
)
9805 /* If do_jump produces code that might be jumped around,
9806 do any stack adjusts from that code, before the place
9807 where control merges in. */
9808 do_pending_stack_adjust ();
9809 emit_label (drop_through_label
);
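/* As an illustration of what do_jump produces for a short-circuit
   condition (labels and variables here are only for exposition): for
   "if (a && b) stmt;" the TRUTH_ANDIF_EXPR case above emits roughly

	if (a == 0) goto Lfalse;
	if (b == 0) goto Lfalse;
	stmt;
     Lfalse:

   so each operand gets its own conditional jump and no zero-or-one value
   is ever materialized.  */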
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
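/* The loop above is the usual multiword comparison scheme: compare the
   highest-order words first and look at lower words only when the higher
   ones are equal.  As a stand-alone sketch in plain C (nothing below is
   used by the compiler; names and types are illustrative):  */
#if 0
#include <stddef.h>

/* Return nonzero if A > B, where A and B are N-word unsigned numbers
   stored most-significant word first.  */
static int
wide_gtu (const unsigned long *a, const unsigned long *b, size_t n)
{
  size_t i;

  for (i = 0; i < n; i++)
    {
      if (a[i] > b[i])
        return 1;		/* Decided by the first differing word.  */
      if (a[i] != b[i])
        return 0;		/* a[i] < b[i]; lower words don't matter.  */
    }
  return 0;			/* All words equal, so not greater.  */
}
#endif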
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (type_for_size (index_bits, 0),
                                index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
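/* A sketch (not compiler code) of the wide-index handling above: when the
   switch index is wider than SImode, the lower bound is subtracted and the
   bounds check is done in the original wide mode, after which the index can
   be truncated safely.  Types and names here are illustrative only.  */
#if 0
static int
wide_index_sketch (long long index, long long minval, long long range)
{
  unsigned long long biased = (unsigned long long) (index - minval);
  if (biased > (unsigned long long) range)
    return -1;                          /* default_label */
  return (int) biased;                  /* now narrow enough for casesi */
}
#endif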
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
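/* A sketch (not compiler code) of what do_tablejump arranges, with
   illustrative C types standing in for rtx values.  The single unsigned
   comparison catches both "below minimum" and "above maximum" because the
   minimum was already subtracted from the index; ENTRY_SIZE stands in for
   GET_MODE_SIZE (CASE_VECTOR_MODE).  */
#if 0
static const void *
tablejump_sketch (unsigned long index, unsigned long range,
                  const char *table_label, int entry_size)
{
  if (index > range)            /* GTU compare -> default_label */
    return 0;
  /* Address of the dispatch-table entry: table base plus scaled index.  */
  return (const void *) (table_label + index * entry_size);
}
#endif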
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}