1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
45 #include "langhooks.h"
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
81 #define TARGET_MEM_FUNCTIONS 0
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 static tree placeholder_list = 0;
96 /* This structure is used by move_by_pieces to describe the move to
107 int explicit_inc_from;
108 unsigned HOST_WIDE_INT len;
109 HOST_WIDE_INT offset;
113 /* This structure is used by store_by_pieces to describe the clear to
116 struct store_by_pieces
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 static rtx enqueue_insn PARAMS ((rtx, rtx));
130 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
131 PARAMS ((unsigned HOST_WIDE_INT,
133 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
134 struct move_by_pieces *));
135 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
136 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
137 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
138 static tree emit_block_move_libcall_fn PARAMS ((int));
139 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
148 struct store_by_pieces *));
149 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
150 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
151 static tree clear_storage_libcall_fn PARAMS ((int));
152 static rtx compress_float_constant PARAMS ((rtx, rtx));
153 static rtx get_subtarget PARAMS ((rtx));
154 static int is_zeros_p PARAMS ((tree));
155 static int mostly_zeros_p PARAMS ((tree));
156 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
157 HOST_WIDE_INT, enum machine_mode,
158 tree, tree, int, int));
159 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
160 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, enum machine_mode, int, tree,
164 static rtx var_rtx PARAMS ((tree));
165 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
166 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
167 static int is_aligning_offset PARAMS ((tree, tree));
168 static rtx expand_increment PARAMS ((tree, int, int));
169 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
170 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
171 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
179 /* Record for each mode whether we can move a register directly to or
180 from an object of that mode in memory. If we can't, we won't try
181 to use that mode directly when accessing a field of that mode. */
183 static char direct_load[NUM_MACHINE_MODES];
184 static char direct_store[NUM_MACHINE_MODES];
186 /* Record for each mode whether we can float-extend from memory. */
188 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
190 /* If a memory-to-memory move would take MOVE_RATIO or more simple
191 move-instruction sequences, we will do a movstr or libcall instead. */
194 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
197 /* If we are optimizing for space (-Os), cut down the default move ratio. */
198 #define MOVE_RATIO (optimize_size ? 3 : 15)
202 /* This macro is used to determine whether move_by_pieces should be called
203 to perform a structure copy. */
204 #ifndef MOVE_BY_PIECES_P
205 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
206 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
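/* Illustrative note (editorial addition, not original text): on a
   hypothetical target with 4-byte words, no movstr pattern, and the
   default MOVE_RATIO of 15, a word-aligned 16-byte copy costs
   move_by_pieces_ninsns (16, 32) == 4 insns, so MOVE_BY_PIECES_P holds
   and the copy is expanded inline; a 128-byte copy would cost 32 insns
   and be left to a movstr pattern or a library call instead.  */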
209 /* If a clear memory operation would take CLEAR_RATIO or more simple
210 move-instruction sequences, we will do a clrstr or libcall instead. */
213 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
214 #define CLEAR_RATIO 2
216 /* If we are optimizing for space, cut down the default clear ratio. */
217 #define CLEAR_RATIO (optimize_size ? 3 : 15)
221 /* This macro is used to determine whether clear_by_pieces should be
222 called to clear storage. */
223 #ifndef CLEAR_BY_PIECES_P
224 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
225 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
228 /* This array records the insn_code of insns to perform block moves. */
229 enum insn_code movstr_optab[NUM_MACHINE_MODES];
231 /* This array records the insn_code of insns to perform block clears. */
232 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
234 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
236 #ifndef SLOW_UNALIGNED_ACCESS
237 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
247 enum machine_mode mode;
252 /* Try indexing by frame ptr and try by stack ptr.
253 It is known that on the Convex the stack ptr isn't a valid index.
254 With luck, one or the other is valid on any machine. */
255 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
256 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
258 /* A scratch register we can modify in-place below to avoid
259 useless RTL allocations. */
260 reg = gen_rtx_REG (VOIDmode, -1);
262 insn = rtx_alloc (INSN);
263 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
264 PATTERN (insn) = pat;
266 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
267 mode = (enum machine_mode) ((int) mode + 1))
271 direct_load[(int) mode] = direct_store[(int) mode] = 0;
272 PUT_MODE (mem, mode);
273 PUT_MODE (mem1, mode);
274 PUT_MODE (reg, mode);
276 /* See if there is some register that can be used in this mode and
277 directly loaded or stored from memory. */
279 if (mode != VOIDmode && mode != BLKmode)
280 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
281 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
284 if (! HARD_REGNO_MODE_OK (regno, mode))
290 SET_DEST (pat) = reg;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_load[(int) mode] = 1;
294 SET_SRC (pat) = mem1;
295 SET_DEST (pat) = reg;
296 if (recog (pat, insn, &num_clobbers) >= 0)
297 direct_load[(int) mode] = 1;
300 SET_DEST (pat) = mem;
301 if (recog (pat, insn, &num_clobbers) >= 0)
302 direct_store[(int) mode] = 1;
305 SET_DEST (pat) = mem1;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_store[(int) mode] = 1;
311 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
314 mode = GET_MODE_WIDER_MODE (mode))
316 enum machine_mode srcmode;
317 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
318 srcmode = GET_MODE_WIDER_MODE (srcmode))
322 ic = can_extend_p (mode, srcmode, 0);
323 if (ic == CODE_FOR_nothing)
326 PUT_MODE (mem, srcmode);
328 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
329 float_extend_from_mem[mode][srcmode] = true;
334 /* This is run at the start of compiling a function. */
339 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
342 pending_stack_adjust = 0;
343 stack_pointer_delta = 0;
344 inhibit_defer_pop = 0;
346 apply_args_value = 0;
350 /* Small sanity check that the queue is empty at the end of a function. */
353 finish_expr_for_function ()
359 /* Manage the queue of increment instructions to be output
360 for POSTINCREMENT_EXPR expressions, etc. */
362 /* Queue up to increment (or change) VAR later. BODY says how:
363 BODY should be the same thing you would pass to emit_insn
364 to increment right away. It will go to emit_insn later on.
366 The value is a QUEUED expression to be used in place of VAR
367 where you want to guarantee the pre-incrementation value of VAR. */
370 enqueue_insn (var, body)
373 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
374 body, pending_chain);
375 return pending_chain;
378 /* Use protect_from_queue to convert a QUEUED expression
379 into something that you can put immediately into an instruction.
380 If the queued incrementation has not happened yet,
381 protect_from_queue returns the variable itself.
382 If the incrementation has happened, protect_from_queue returns a temp
383 that contains a copy of the old value of the variable.
385 Any time an rtx which might possibly be a QUEUED is to be put
386 into an instruction, it must be passed through protect_from_queue first.
387 QUEUED expressions are not meaningful in instructions.
389 Do not pass a value through protect_from_queue and then hold
390 on to it for a while before putting it in an instruction!
391 If the queue is flushed in between, incorrect code will result. */
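/* Editorial sketch (not part of the original source): typical use of the
   queue API when emitting an insn that reads VAR while a POSTINCREMENT of
   VAR may still be pending.  The function and variable names below are
   hypothetical.  */
#if 0
static void
example_emit_use_of_queued (target, var)
     rtx target, var;
{
  /* Obtain operands that are safe to put in an insn: either the object
     itself (if its increment has not yet been emitted) or a copy of its
     pre-increment value.  */
  rtx safe_src = protect_from_queue (var, 0);
  rtx safe_dst = protect_from_queue (target, 1);

  /* Use them immediately; do not hold SAFE_SRC across emit_queue ().  */
  emit_insn (gen_move_insn (safe_dst, safe_src));
}
#endif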
394 protect_from_queue (x, modify)
398 RTX_CODE code = GET_CODE (x);
400 #if 0 /* A QUEUED can hang around after the queue is forced out. */
401 /* Shortcut for most common case. */
402 if (pending_chain == 0)
408 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
409 use of autoincrement. Make a copy of the contents of the memory
410 location rather than a copy of the address, but not if the value is
411 of mode BLKmode. Don't modify X in place since it might be
413 if (code == MEM && GET_MODE (x) != BLKmode
414 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
417 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
421 rtx temp = gen_reg_rtx (GET_MODE (x));
423 emit_insn_before (gen_move_insn (temp, new),
428 /* Copy the address into a pseudo, so that the returned value
429 remains correct across calls to emit_queue. */
430 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
433 /* Otherwise, recursively protect the subexpressions of all
434 the kinds of rtx's that can contain a QUEUED. */
437 rtx tem = protect_from_queue (XEXP (x, 0), 0);
438 if (tem != XEXP (x, 0))
444 else if (code == PLUS || code == MULT)
446 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
447 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
448 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
457 /* If the increment has not happened, use the variable itself. Copy it
458 into a new pseudo so that the value remains correct across calls to
460 if (QUEUED_INSN (x) == 0)
461 return copy_to_reg (QUEUED_VAR (x));
462 /* If the increment has happened and a pre-increment copy exists,
464 if (QUEUED_COPY (x) != 0)
465 return QUEUED_COPY (x);
466 /* The increment has happened but we haven't set up a pre-increment copy.
467 Set one up now, and use it. */
468 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
469 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
471 return QUEUED_COPY (x);
474 /* Return nonzero if X contains a QUEUED expression:
475 if it contains anything that will be altered by a queued increment.
476 We handle only combinations of MEM, PLUS, MINUS and MULT operators
477 since memory addresses generally contain only those. */
483 enum rtx_code code = GET_CODE (x);
489 return queued_subexp_p (XEXP (x, 0));
493 return (queued_subexp_p (XEXP (x, 0))
494 || queued_subexp_p (XEXP (x, 1)));
500 /* Perform all the pending incrementations. */
506 while ((p = pending_chain))
508 rtx body = QUEUED_BODY (p);
510 switch (GET_CODE (body))
518 QUEUED_INSN (p) = body;
522 #ifdef ENABLE_CHECKING
529 QUEUED_INSN (p) = emit_insn (body);
533 pending_chain = QUEUED_NEXT (p);
537 /* Copy data from FROM to TO, where the machine modes are not the same.
538 Both modes may be integer, or both may be floating.
539 UNSIGNEDP should be nonzero if FROM is an unsigned type.
540 This causes zero-extension instead of sign-extension. */
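/* Editorial sketch (not original code): how a caller typically widens a
   QImode value into an SImode register, letting convert_move choose the
   extension strategy.  The function name is hypothetical.  */
#if 0
static rtx
example_widen_to_si (narrow_qi, unsignedp)
     rtx narrow_qi;
     int unsignedp;
{
  rtx wide = gen_reg_rtx (SImode);
  /* Zero-extends when UNSIGNEDP is nonzero, sign-extends otherwise.  */
  convert_move (wide, narrow_qi, unsignedp);
  return wide;
}
#endif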
543 convert_move (to, from, unsignedp)
547 enum machine_mode to_mode = GET_MODE (to);
548 enum machine_mode from_mode = GET_MODE (from);
549 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
550 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
554 /* rtx code for making an equivalent value. */
555 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
557 to = protect_from_queue (to, 1);
558 from = protect_from_queue (from, 0);
560 if (to_real != from_real)
563 /* If FROM is a SUBREG that indicates that we have already done at least
564 the required extension, strip it. We don't handle such SUBREGs as
567 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
568 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
569 >= GET_MODE_SIZE (to_mode))
570 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
571 from = gen_lowpart (to_mode, from), from_mode = to_mode;
573 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
576 if (to_mode == from_mode
577 || (from_mode == VOIDmode && CONSTANT_P (from)))
579 emit_move_insn (to, from);
583 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
585 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
588 if (VECTOR_MODE_P (to_mode))
589 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
591 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
593 emit_move_insn (to, from);
597 if (to_real != from_real)
604 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
606 /* Try converting directly if the insn is supported. */
607 if ((code = can_extend_p (to_mode, from_mode, 0))
610 emit_unop_insn (code, to, from, UNKNOWN);
615 #ifdef HAVE_trunchfqf2
616 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
618 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
622 #ifdef HAVE_trunctqfqf2
623 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
625 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
629 #ifdef HAVE_truncsfqf2
630 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
632 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
636 #ifdef HAVE_truncdfqf2
637 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
639 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
643 #ifdef HAVE_truncxfqf2
644 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
646 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
650 #ifdef HAVE_trunctfqf2
651 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
653 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
658 #ifdef HAVE_trunctqfhf2
659 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
661 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
665 #ifdef HAVE_truncsfhf2
666 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
668 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
672 #ifdef HAVE_truncdfhf2
673 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
675 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
679 #ifdef HAVE_truncxfhf2
680 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
682 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
686 #ifdef HAVE_trunctfhf2
687 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
689 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
694 #ifdef HAVE_truncsftqf2
695 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
697 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
701 #ifdef HAVE_truncdftqf2
702 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
704 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
708 #ifdef HAVE_truncxftqf2
709 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
711 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
715 #ifdef HAVE_trunctftqf2
716 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
718 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
723 #ifdef HAVE_truncdfsf2
724 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
726 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
730 #ifdef HAVE_truncxfsf2
731 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
733 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
737 #ifdef HAVE_trunctfsf2
738 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
740 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
744 #ifdef HAVE_truncxfdf2
745 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
747 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
751 #ifdef HAVE_trunctfdf2
752 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
754 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
766 libcall = extendsfdf2_libfunc;
770 libcall = extendsfxf2_libfunc;
774 libcall = extendsftf2_libfunc;
786 libcall = truncdfsf2_libfunc;
790 libcall = extenddfxf2_libfunc;
794 libcall = extenddftf2_libfunc;
806 libcall = truncxfsf2_libfunc;
810 libcall = truncxfdf2_libfunc;
822 libcall = trunctfsf2_libfunc;
826 libcall = trunctfdf2_libfunc;
838 if (libcall == (rtx) 0)
839 /* This conversion is not implemented yet. */
843 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
845 insns = get_insns ();
847 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
852 /* Now both modes are integers. */
854 /* Handle expanding beyond a word. */
855 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
856 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
863 enum machine_mode lowpart_mode;
864 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
866 /* Try converting directly if the insn is supported. */
867 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
870 /* If FROM is a SUBREG, put it into a register. Do this
871 so that we always generate the same set of insns for
872 better cse'ing; if an intermediate assignment occurred,
873 we won't be doing the operation directly on the SUBREG. */
874 if (optimize > 0 && GET_CODE (from) == SUBREG)
875 from = force_reg (from_mode, from);
876 emit_unop_insn (code, to, from, equiv_code);
879 /* Next, try converting via full word. */
880 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
881 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
882 != CODE_FOR_nothing))
884 if (GET_CODE (to) == REG)
885 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
886 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
887 emit_unop_insn (code, to,
888 gen_lowpart (word_mode, to), equiv_code);
892 /* No special multiword conversion insn; do it by hand. */
895 /* Since we will turn this into a no conflict block, we must ensure
896 that the source does not overlap the target. */
898 if (reg_overlap_mentioned_p (to, from))
899 from = force_reg (from_mode, from);
901 /* Get a copy of FROM widened to a word, if necessary. */
902 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
903 lowpart_mode = word_mode;
905 lowpart_mode = from_mode;
907 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
909 lowpart = gen_lowpart (lowpart_mode, to);
910 emit_move_insn (lowpart, lowfrom);
912 /* Compute the value to put in each remaining word. */
914 fill_value = const0_rtx;
919 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
920 && STORE_FLAG_VALUE == -1)
922 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
924 fill_value = gen_reg_rtx (word_mode);
925 emit_insn (gen_slt (fill_value));
931 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
932 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
934 fill_value = convert_to_mode (word_mode, fill_value, 1);
938 /* Fill the remaining words. */
939 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
941 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
942 rtx subword = operand_subword (to, index, 1, to_mode);
947 if (fill_value != subword)
948 emit_move_insn (subword, fill_value);
951 insns = get_insns ();
954 emit_no_conflict_block (insns, to, from, NULL_RTX,
955 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
959 /* Truncating multi-word to a word or less. */
960 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
961 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
963 if (!((GET_CODE (from) == MEM
964 && ! MEM_VOLATILE_P (from)
965 && direct_load[(int) to_mode]
966 && ! mode_dependent_address_p (XEXP (from, 0)))
967 || GET_CODE (from) == REG
968 || GET_CODE (from) == SUBREG))
969 from = force_reg (from_mode, from);
970 convert_move (to, gen_lowpart (word_mode, from), 0);
974 /* Handle pointer conversion. */ /* SPEE 900220. */
975 if (to_mode == PQImode)
977 if (from_mode != QImode)
978 from = convert_to_mode (QImode, from, unsignedp);
980 #ifdef HAVE_truncqipqi2
981 if (HAVE_truncqipqi2)
983 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
986 #endif /* HAVE_truncqipqi2 */
990 if (from_mode == PQImode)
992 if (to_mode != QImode)
994 from = convert_to_mode (QImode, from, unsignedp);
999 #ifdef HAVE_extendpqiqi2
1000 if (HAVE_extendpqiqi2)
1002 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1005 #endif /* HAVE_extendpqiqi2 */
1010 if (to_mode == PSImode)
1012 if (from_mode != SImode)
1013 from = convert_to_mode (SImode, from, unsignedp);
1015 #ifdef HAVE_truncsipsi2
1016 if (HAVE_truncsipsi2)
1018 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1021 #endif /* HAVE_truncsipsi2 */
1025 if (from_mode == PSImode)
1027 if (to_mode != SImode)
1029 from = convert_to_mode (SImode, from, unsignedp);
1034 #ifdef HAVE_extendpsisi2
1035 if (! unsignedp && HAVE_extendpsisi2)
1037 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1040 #endif /* HAVE_extendpsisi2 */
1041 #ifdef HAVE_zero_extendpsisi2
1042 if (unsignedp && HAVE_zero_extendpsisi2)
1044 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1047 #endif /* HAVE_zero_extendpsisi2 */
1052 if (to_mode == PDImode)
1054 if (from_mode != DImode)
1055 from = convert_to_mode (DImode, from, unsignedp);
1057 #ifdef HAVE_truncdipdi2
1058 if (HAVE_truncdipdi2)
1060 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1063 #endif /* HAVE_truncdipdi2 */
1067 if (from_mode == PDImode)
1069 if (to_mode != DImode)
1071 from = convert_to_mode (DImode, from, unsignedp);
1076 #ifdef HAVE_extendpdidi2
1077 if (HAVE_extendpdidi2)
1079 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1082 #endif /* HAVE_extendpdidi2 */
1087 /* Now follow all the conversions between integers
1088 no more than a word long. */
1090 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1091 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1092 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1093 GET_MODE_BITSIZE (from_mode)))
1095 if (!((GET_CODE (from) == MEM
1096 && ! MEM_VOLATILE_P (from)
1097 && direct_load[(int) to_mode]
1098 && ! mode_dependent_address_p (XEXP (from, 0)))
1099 || GET_CODE (from) == REG
1100 || GET_CODE (from) == SUBREG))
1101 from = force_reg (from_mode, from);
1102 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1103 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1104 from = copy_to_reg (from);
1105 emit_move_insn (to, gen_lowpart (to_mode, from));
1109 /* Handle extension. */
1110 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1112 /* Convert directly if that works. */
1113 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1114 != CODE_FOR_nothing)
1117 from = force_not_mem (from);
1119 emit_unop_insn (code, to, from, equiv_code);
1124 enum machine_mode intermediate;
1128 /* Search for a mode to convert via. */
1129 for (intermediate = from_mode; intermediate != VOIDmode;
1130 intermediate = GET_MODE_WIDER_MODE (intermediate))
1131 if (((can_extend_p (to_mode, intermediate, unsignedp)
1132 != CODE_FOR_nothing)
1133 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1134 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1135 GET_MODE_BITSIZE (intermediate))))
1136 && (can_extend_p (intermediate, from_mode, unsignedp)
1137 != CODE_FOR_nothing))
1139 convert_move (to, convert_to_mode (intermediate, from,
1140 unsignedp), unsignedp);
1144 /* No suitable intermediate mode.
1145 Generate what we need with shifts. */
1146 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1147 - GET_MODE_BITSIZE (from_mode), 0);
1148 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1149 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1151 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1154 emit_move_insn (to, tmp);
1159 /* Support special truncate insns for certain modes. */
1161 if (from_mode == DImode && to_mode == SImode)
1163 #ifdef HAVE_truncdisi2
1164 if (HAVE_truncdisi2)
1166 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1170 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 if (from_mode == DImode && to_mode == HImode)
1176 #ifdef HAVE_truncdihi2
1177 if (HAVE_truncdihi2)
1179 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1183 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 if (from_mode == DImode && to_mode == QImode)
1189 #ifdef HAVE_truncdiqi2
1190 if (HAVE_truncdiqi2)
1192 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1196 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 if (from_mode == SImode && to_mode == HImode)
1202 #ifdef HAVE_truncsihi2
1203 if (HAVE_truncsihi2)
1205 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1209 convert_move (to, force_reg (from_mode, from), unsignedp);
1213 if (from_mode == SImode && to_mode == QImode)
1215 #ifdef HAVE_truncsiqi2
1216 if (HAVE_truncsiqi2)
1218 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1222 convert_move (to, force_reg (from_mode, from), unsignedp);
1226 if (from_mode == HImode && to_mode == QImode)
1228 #ifdef HAVE_trunchiqi2
1229 if (HAVE_trunchiqi2)
1231 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1235 convert_move (to, force_reg (from_mode, from), unsignedp);
1239 if (from_mode == TImode && to_mode == DImode)
1241 #ifdef HAVE_trunctidi2
1242 if (HAVE_trunctidi2)
1244 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1248 convert_move (to, force_reg (from_mode, from), unsignedp);
1252 if (from_mode == TImode && to_mode == SImode)
1254 #ifdef HAVE_trunctisi2
1255 if (HAVE_trunctisi2)
1257 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1261 convert_move (to, force_reg (from_mode, from), unsignedp);
1265 if (from_mode == TImode && to_mode == HImode)
1267 #ifdef HAVE_trunctihi2
1268 if (HAVE_trunctihi2)
1270 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1274 convert_move (to, force_reg (from_mode, from), unsignedp);
1278 if (from_mode == TImode && to_mode == QImode)
1280 #ifdef HAVE_trunctiqi2
1281 if (HAVE_trunctiqi2)
1283 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1287 convert_move (to, force_reg (from_mode, from), unsignedp);
1291 /* Handle truncation of volatile memrefs, and so on;
1292 the things that couldn't be truncated directly,
1293 and for which there was no special instruction. */
1294 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1296 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1297 emit_move_insn (to, temp);
1301 /* Mode combination is not recognized. */
1305 /* Return an rtx for a value that would result
1306 from converting X to mode MODE.
1307 Both X and MODE may be floating, or both integer.
1308 UNSIGNEDP is nonzero if X is an unsigned value.
1309 This can be done by referring to a part of X in place
1310 or by copying to a new temporary with conversion.
1312 This function *must not* call protect_from_queue
1313 except when putting X into an insn (in which case convert_move does it). */
1316 convert_to_mode (mode, x, unsignedp)
1317 enum machine_mode mode;
1321 return convert_modes (mode, VOIDmode, x, unsignedp);
1324 /* Return an rtx for a value that would result
1325 from converting X from mode OLDMODE to mode MODE.
1326 Both modes may be floating, or both integer.
1327 UNSIGNEDP is nonzero if X is an unsigned value.
1329 This can be done by referring to a part of X in place
1330 or by copying to a new temporary with conversion.
1332 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1334 This function *must not* call protect_from_queue
1335 except when putting X into an insn (in which case convert_move does it). */
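/* Editorial sketch (not original code): convert_modes is the value-returning
   counterpart of convert_move; OLDMODE describes X when X itself carries no
   mode (e.g. a CONST_INT).  The function name below is hypothetical.  */
#if 0
static rtx
example_const_to_hi ()
{
  /* Interpret the constant 300 as an SImode value and produce an HImode
     equivalent, truncating or extending as required.  */
  return convert_modes (HImode, SImode, GEN_INT (300), 1);
}
#endif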
1338 convert_modes (mode, oldmode, x, unsignedp)
1339 enum machine_mode mode, oldmode;
1345 /* If FROM is a SUBREG that indicates that we have already done at least
1346 the required extension, strip it. */
1348 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1349 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1350 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1351 x = gen_lowpart (mode, x);
1353 if (GET_MODE (x) != VOIDmode)
1354 oldmode = GET_MODE (x);
1356 if (mode == oldmode)
1359 /* There is one case that we must handle specially: If we are converting
1360 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1361 we are to interpret the constant as unsigned, gen_lowpart will do
1362 the wrong if the constant appears negative. What we want to do is
1363 make the high-order word of the constant zero, not all ones. */
1365 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1366 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1367 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1369 HOST_WIDE_INT val = INTVAL (x);
1371 if (oldmode != VOIDmode
1372 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1374 int width = GET_MODE_BITSIZE (oldmode);
1376 /* We need to zero extend VAL. */
1377 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1380 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1383 /* We can do this with a gen_lowpart if both desired and current modes
1384 are integer, and this is either a constant integer, a register, or a
1385 non-volatile MEM. Except for the constant case where MODE is no
1386 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1388 if ((GET_CODE (x) == CONST_INT
1389 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1390 || (GET_MODE_CLASS (mode) == MODE_INT
1391 && GET_MODE_CLASS (oldmode) == MODE_INT
1392 && (GET_CODE (x) == CONST_DOUBLE
1393 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1394 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1395 && direct_load[(int) mode])
1396 || (GET_CODE (x) == REG
1397 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1398 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1400 /* ?? If we don't know OLDMODE, we have to assume here that
1401 X does not need sign- or zero-extension. This may not be
1402 the case, but it's the best we can do. */
1403 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1404 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1406 HOST_WIDE_INT val = INTVAL (x);
1407 int width = GET_MODE_BITSIZE (oldmode);
1409 /* We must sign or zero-extend in this case. Start by
1410 zero-extending, then sign extend if we need to. */
1411 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1413 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1414 val |= (HOST_WIDE_INT) (-1) << width;
1416 return gen_int_mode (val, mode);
1419 return gen_lowpart (mode, x);
1422 temp = gen_reg_rtx (mode);
1423 convert_move (temp, x, unsignedp);
1427 /* This macro is used to determine what the largest unit size that
1428 move_by_pieces can use is. */
1430 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1431 move efficiently, as opposed to MOVE_MAX which is the maximum
1432 number of bytes we can move with a single instruction. */
1434 #ifndef MOVE_MAX_PIECES
1435 #define MOVE_MAX_PIECES MOVE_MAX
1438 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1439 store efficiently. Due to internal GCC limitations, this is
1440 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1441 for an immediate constant. */
1443 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1445 /* Generate several move instructions to copy LEN bytes from block FROM to
1446 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1447 and TO through protect_from_queue before calling.
1449 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1450 used to push FROM to the stack.
1452 ALIGN is maximum alignment we can assume. */
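/* Editorial sketch (not original code): a typical call, made only after the
   MOVE_BY_PIECES_P heuristic has approved an inline expansion.  DST and SRC
   are BLKmode MEMs already passed through protect_from_queue; the function
   name is hypothetical.  */
#if 0
static void
example_inline_copy (dst, src, nbytes)
     rtx dst, src;
     unsigned HOST_WIDE_INT nbytes;
{
  unsigned int align = MIN (MEM_ALIGN (dst), MEM_ALIGN (src));
  if (MOVE_BY_PIECES_P (nbytes, align))
    move_by_pieces (dst, src, nbytes, align);
}
#endif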
1455 move_by_pieces (to, from, len, align)
1457 unsigned HOST_WIDE_INT len;
1460 struct move_by_pieces data;
1461 rtx to_addr, from_addr = XEXP (from, 0);
1462 unsigned int max_size = MOVE_MAX_PIECES + 1;
1463 enum machine_mode mode = VOIDmode, tmode;
1464 enum insn_code icode;
1467 data.from_addr = from_addr;
1470 to_addr = XEXP (to, 0);
1473 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1474 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1476 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1483 #ifdef STACK_GROWS_DOWNWARD
1489 data.to_addr = to_addr;
1492 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1493 || GET_CODE (from_addr) == POST_INC
1494 || GET_CODE (from_addr) == POST_DEC);
1496 data.explicit_inc_from = 0;
1497 data.explicit_inc_to = 0;
1498 if (data.reverse) data.offset = len;
1501 /* If copying requires more than two move insns,
1502 copy addresses to registers (to make displacements shorter)
1503 and use post-increment if available. */
1504 if (!(data.autinc_from && data.autinc_to)
1505 && move_by_pieces_ninsns (len, align) > 2)
1507 /* Find the mode of the largest move... */
1508 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1509 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1510 if (GET_MODE_SIZE (tmode) < max_size)
1513 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1515 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1516 data.autinc_from = 1;
1517 data.explicit_inc_from = -1;
1519 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1521 data.from_addr = copy_addr_to_reg (from_addr);
1522 data.autinc_from = 1;
1523 data.explicit_inc_from = 1;
1525 if (!data.autinc_from && CONSTANT_P (from_addr))
1526 data.from_addr = copy_addr_to_reg (from_addr);
1527 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1529 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1531 data.explicit_inc_to = -1;
1533 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1535 data.to_addr = copy_addr_to_reg (to_addr);
1537 data.explicit_inc_to = 1;
1539 if (!data.autinc_to && CONSTANT_P (to_addr))
1540 data.to_addr = copy_addr_to_reg (to_addr);
1543 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1544 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1545 align = MOVE_MAX * BITS_PER_UNIT;
1547 /* First move what we can in the largest integer mode, then go to
1548 successively smaller modes. */
1550 while (max_size > 1)
1552 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1553 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1554 if (GET_MODE_SIZE (tmode) < max_size)
1557 if (mode == VOIDmode)
1560 icode = mov_optab->handlers[(int) mode].insn_code;
1561 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1562 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1564 max_size = GET_MODE_SIZE (mode);
1567 /* The code above should have handled everything. */
1572 /* Return number of insns required to move L bytes by pieces.
1573 ALIGN (in bits) is maximum alignment we can assume. */
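/* Editorial note (not original text): as a worked example, assume a
   hypothetical target with 4-byte MOVE_MAX and alignment good enough for
   every mode.  Then 11 bytes decompose as 2 SImode + 1 HImode + 1 QImode
   move, so this function returns 4.  */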
1575 static unsigned HOST_WIDE_INT
1576 move_by_pieces_ninsns (l, align)
1577 unsigned HOST_WIDE_INT l;
1580 unsigned HOST_WIDE_INT n_insns = 0;
1581 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1583 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1584 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1585 align = MOVE_MAX * BITS_PER_UNIT;
1587 while (max_size > 1)
1589 enum machine_mode mode = VOIDmode, tmode;
1590 enum insn_code icode;
1592 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1593 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1594 if (GET_MODE_SIZE (tmode) < max_size)
1597 if (mode == VOIDmode)
1600 icode = mov_optab->handlers[(int) mode].insn_code;
1601 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1602 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1604 max_size = GET_MODE_SIZE (mode);
1612 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1613 with move instructions for mode MODE. GENFUN is the gen_... function
1614 to make a move insn for that mode. DATA has all the other info. */
1617 move_by_pieces_1 (genfun, mode, data)
1618 rtx (*genfun) PARAMS ((rtx, ...));
1619 enum machine_mode mode;
1620 struct move_by_pieces *data;
1622 unsigned int size = GET_MODE_SIZE (mode);
1623 rtx to1 = NULL_RTX, from1;
1625 while (data->len >= size)
1628 data->offset -= size;
1632 if (data->autinc_to)
1633 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1636 to1 = adjust_address (data->to, mode, data->offset);
1639 if (data->autinc_from)
1640 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1643 from1 = adjust_address (data->from, mode, data->offset);
1645 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1646 emit_insn (gen_add2_insn (data->to_addr,
1647 GEN_INT (-(HOST_WIDE_INT)size)));
1648 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1649 emit_insn (gen_add2_insn (data->from_addr,
1650 GEN_INT (-(HOST_WIDE_INT)size)));
1653 emit_insn ((*genfun) (to1, from1));
1656 #ifdef PUSH_ROUNDING
1657 emit_single_push_insn (mode, from1, NULL);
1663 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1664 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1665 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1666 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1668 if (! data->reverse)
1669 data->offset += size;
1675 /* Emit code to move a block Y to a block X. This may be done with
1676 string-move instructions, with multiple scalar move instructions,
1677 or with a library call.
1679 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1680 SIZE is an rtx that says how long they are.
1681 ALIGN is the maximum alignment we can assume they have.
1682 METHOD describes what kind of copy this is, and what mechanisms may be used.
1684 Return the address of the new block, if memcpy is called and returns it,
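/* Editorial sketch (not original code): a caller copying NBYTES bytes of a
   BLKmode object picks METHOD from context; BLOCK_OP_CALL_PARM is used while
   outgoing arguments are already being pushed.  Names are hypothetical.  */
#if 0
static void
example_block_copy (dst, src, nbytes)
     rtx dst, src;
     HOST_WIDE_INT nbytes;
{
  emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif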
1688 emit_block_move (x, y, size, method)
1690 enum block_op_methods method;
1698 case BLOCK_OP_NORMAL:
1699 may_use_call = true;
1702 case BLOCK_OP_CALL_PARM:
1703 may_use_call = block_move_libcall_safe_for_call_parm ();
1705 /* Make inhibit_defer_pop nonzero around the library call
1706 to force it to pop the arguments right away. */
1710 case BLOCK_OP_NO_LIBCALL:
1711 may_use_call = false;
1718 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1720 if (GET_MODE (x) != BLKmode)
1722 if (GET_MODE (y) != BLKmode)
1725 x = protect_from_queue (x, 1);
1726 y = protect_from_queue (y, 0);
1727 size = protect_from_queue (size, 0);
1729 if (GET_CODE (x) != MEM)
1731 if (GET_CODE (y) != MEM)
1736 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1737 can be incorrect is coming from __builtin_memcpy. */
1738 if (GET_CODE (size) == CONST_INT)
1740 x = shallow_copy_rtx (x);
1741 y = shallow_copy_rtx (y);
1742 set_mem_size (x, size);
1743 set_mem_size (y, size);
1746 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1747 move_by_pieces (x, y, INTVAL (size), align);
1748 else if (emit_block_move_via_movstr (x, y, size, align))
1750 else if (may_use_call)
1751 retval = emit_block_move_via_libcall (x, y, size);
1753 emit_block_move_via_loop (x, y, size, align);
1755 if (method == BLOCK_OP_CALL_PARM)
1761 /* A subroutine of emit_block_move. Returns true if calling the
1762 block move libcall will not clobber any parameters which may have
1763 already been placed on the stack. */
1766 block_move_libcall_safe_for_call_parm ()
1772 /* Check to see whether memcpy takes all register arguments. */
1774 takes_regs_uninit, takes_regs_no, takes_regs_yes
1775 } takes_regs = takes_regs_uninit;
1779 case takes_regs_uninit:
1781 CUMULATIVE_ARGS args_so_far;
1784 fn = emit_block_move_libcall_fn (false);
1785 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1787 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1788 for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
1790 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1791 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1792 if (!tmp || !REG_P (tmp))
1793 goto fail_takes_regs;
1794 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1795 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1797 goto fail_takes_regs;
1799 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1802 takes_regs = takes_regs_yes;
1805 case takes_regs_yes:
1809 takes_regs = takes_regs_no;
1820 /* A subroutine of emit_block_move. Expand a movstr pattern;
1821 return true if successful. */
1824 emit_block_move_via_movstr (x, y, size, align)
1828 /* Try the most limited insn first, because there's no point
1829 including more than one in the machine description unless
1830 the more limited one has some advantage. */
1832 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1833 enum machine_mode mode;
1835 /* Since this is a move insn, we don't care about volatility. */
1838 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1839 mode = GET_MODE_WIDER_MODE (mode))
1841 enum insn_code code = movstr_optab[(int) mode];
1842 insn_operand_predicate_fn pred;
1844 if (code != CODE_FOR_nothing
1845 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1846 here because if SIZE is less than the mode mask, as it is
1847 returned by the macro, it will definitely be less than the
1848 actual mode mask. */
1849 && ((GET_CODE (size) == CONST_INT
1850 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1851 <= (GET_MODE_MASK (mode) >> 1)))
1852 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1853 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1854 || (*pred) (x, BLKmode))
1855 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1856 || (*pred) (y, BLKmode))
1857 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1858 || (*pred) (opalign, VOIDmode)))
1861 rtx last = get_last_insn ();
1864 op2 = convert_to_mode (mode, size, 1);
1865 pred = insn_data[(int) code].operand[2].predicate;
1866 if (pred != 0 && ! (*pred) (op2, mode))
1867 op2 = copy_to_mode_reg (mode, op2);
1869 /* ??? When called via emit_block_move_for_call, it'd be
1870 nice if there were some way to inform the backend, so
1871 that it doesn't fail the expansion because it thinks
1872 emitting the libcall would be more efficient. */
1874 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1882 delete_insns_since (last);
1890 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1891 Return the return value from memcpy, 0 otherwise. */
1894 emit_block_move_via_libcall (dst, src, size)
1897 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1898 enum machine_mode size_mode;
1901 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1903 It is unsafe to save the value generated by protect_from_queue
1904 and reuse it later. Consider what happens if emit_queue is
1905 called before the return value from protect_from_queue is used.
1907 Expansion of the CALL_EXPR below will call emit_queue before
1908 we are finished emitting RTL for argument setup. So if we are
1909 not careful we could get the wrong value for an argument.
1911 To avoid this problem we go ahead and emit code to copy X, Y &
1912 SIZE into new pseudos. We can then place those new pseudos
1913 into an RTL_EXPR and use them later, even after a call to
1916 Note this is not strictly needed for library calls since they
1917 do not call emit_queue before loading their arguments. However,
1918 we may need to have library calls call emit_queue in the future
1919 since failing to do so could cause problems for targets which
1920 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1922 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1923 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1925 if (TARGET_MEM_FUNCTIONS)
1926 size_mode = TYPE_MODE (sizetype);
1928 size_mode = TYPE_MODE (unsigned_type_node);
1929 size = convert_to_mode (size_mode, size, 1);
1930 size = copy_to_mode_reg (size_mode, size);
1932 /* It is incorrect to use the libcall calling conventions to call
1933 memcpy in this context. This could be a user call to memcpy and
1934 the user may wish to examine the return value from memcpy. For
1935 targets where libcalls and normal calls have different conventions
1936 for returning pointers, we could end up generating incorrect code.
1938 For convenience, we generate the call to bcopy this way as well. */
1940 dst_tree = make_tree (ptr_type_node, dst);
1941 src_tree = make_tree (ptr_type_node, src);
1942 if (TARGET_MEM_FUNCTIONS)
1943 size_tree = make_tree (sizetype, size);
1945 size_tree = make_tree (unsigned_type_node, size);
1947 fn = emit_block_move_libcall_fn (true);
1948 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1949 if (TARGET_MEM_FUNCTIONS)
1951 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1952 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1956 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1957 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1960 /* Now we have to build up the CALL_EXPR itself. */
1961 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1962 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1963 call_expr, arg_list, NULL_TREE);
1964 TREE_SIDE_EFFECTS (call_expr) = 1;
1966 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1968 /* If we are initializing a readonly value, show the above call
1969 clobbered it. Otherwise, a load from it may erroneously be
1970 hoisted from a loop. */
1971 if (RTX_UNCHANGING_P (dst))
1972 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1974 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1977 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1978 for the function we use for block copies. The first time FOR_CALL
1979 is true, we call assemble_external. */
1981 static GTY(()) tree block_move_fn;
1984 emit_block_move_libcall_fn (for_call)
1987 static bool emitted_extern;
1988 tree fn = block_move_fn, args;
1992 if (TARGET_MEM_FUNCTIONS)
1994 fn = get_identifier ("memcpy");
1995 args = build_function_type_list (ptr_type_node, ptr_type_node,
1996 const_ptr_type_node, sizetype,
2001 fn = get_identifier ("bcopy");
2002 args = build_function_type_list (void_type_node, const_ptr_type_node,
2003 ptr_type_node, unsigned_type_node,
2007 fn = build_decl (FUNCTION_DECL, fn, args);
2008 DECL_EXTERNAL (fn) = 1;
2009 TREE_PUBLIC (fn) = 1;
2010 DECL_ARTIFICIAL (fn) = 1;
2011 TREE_NOTHROW (fn) = 1;
2016 if (for_call && !emitted_extern)
2018 emitted_extern = true;
2019 make_decl_rtl (fn, NULL);
2020 assemble_external (fn);
2026 /* A subroutine of emit_block_move. Copy the data via an explicit
2027 loop. This is used only when libcalls are forbidden. */
2028 /* ??? It'd be nice to copy in hunks larger than QImode. */
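/* Editorial note (not original text): the RTL emitted below is equivalent
   to the following byte-copy loop, with ITER compared against SIZE at the
   bottom of the loop so a zero-length copy executes no iterations:

     for (iter = 0; iter < size; iter++)
       dst[iter] = src[iter];  */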
2031 emit_block_move_via_loop (x, y, size, align)
2033 unsigned int align ATTRIBUTE_UNUSED;
2035 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2036 enum machine_mode iter_mode;
2038 iter_mode = GET_MODE (size);
2039 if (iter_mode == VOIDmode)
2040 iter_mode = word_mode;
2042 top_label = gen_label_rtx ();
2043 cmp_label = gen_label_rtx ();
2044 iter = gen_reg_rtx (iter_mode);
2046 emit_move_insn (iter, const0_rtx);
2048 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2049 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2050 do_pending_stack_adjust ();
2052 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2054 emit_jump (cmp_label);
2055 emit_label (top_label);
2057 tmp = convert_modes (Pmode, iter_mode, iter, true);
2058 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2059 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2060 x = change_address (x, QImode, x_addr);
2061 y = change_address (y, QImode, y_addr);
2063 emit_move_insn (x, y);
2065 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2066 true, OPTAB_LIB_WIDEN);
2068 emit_move_insn (iter, tmp);
2070 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2071 emit_label (cmp_label);
2073 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2076 emit_note (NULL, NOTE_INSN_LOOP_END);
2079 /* Copy all or part of a value X into registers starting at REGNO.
2080 The number of registers to be filled is NREGS. */
2083 move_block_to_reg (regno, x, nregs, mode)
2087 enum machine_mode mode;
2090 #ifdef HAVE_load_multiple
2098 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2099 x = validize_mem (force_const_mem (mode, x));
2101 /* See if the machine can do this with a load multiple insn. */
2102 #ifdef HAVE_load_multiple
2103 if (HAVE_load_multiple)
2105 last = get_last_insn ();
2106 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2114 delete_insns_since (last);
2118 for (i = 0; i < nregs; i++)
2119 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2120 operand_subword_force (x, i, mode));
2123 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2124 The number of registers to be filled is NREGS. SIZE indicates the number
2125 of bytes in the object X. */
2128 move_block_from_reg (regno, x, nregs, size)
2135 #ifdef HAVE_store_multiple
2139 enum machine_mode mode;
2144 /* If SIZE is that of a mode no bigger than a word, just use that
2145 mode's store operation. */
2146 if (size <= UNITS_PER_WORD
2147 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
2148 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
2150 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2154 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2155 to the left before storing to memory. Note that the previous test
2156 doesn't handle all cases (e.g. SIZE == 3). */
2157 if (size < UNITS_PER_WORD
2159 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
2161 rtx tem = operand_subword (x, 0, 1, BLKmode);
2167 shift = expand_shift (LSHIFT_EXPR, word_mode,
2168 gen_rtx_REG (word_mode, regno),
2169 build_int_2 ((UNITS_PER_WORD - size)
2170 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2171 emit_move_insn (tem, shift);
2175 /* See if the machine can do this with a store multiple insn. */
2176 #ifdef HAVE_store_multiple
2177 if (HAVE_store_multiple)
2179 last = get_last_insn ();
2180 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2188 delete_insns_since (last);
2192 for (i = 0; i < nregs; i++)
2194 rtx tem = operand_subword (x, i, 1, BLKmode);
2199 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2203 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2204 registers represented by a PARALLEL. SSIZE represents the total size of
2205 block SRC in bytes, or -1 if not known. */
2206 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2207 the balance will be in what would be the low-order memory addresses, i.e.
2208 left justified for big endian, right justified for little endian. This
2209 happens to be true for the targets currently using this support. If this
2210 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
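/* Editorial note (not original text): a typical DST here is a PARALLEL such
   as the following (illustrative only, for a hypothetical target that
   returns a 16-byte struct in two 8-byte registers):

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   i.e. bytes 0-7 of ORIG_SRC are loaded into reg 3 and bytes 8-15 into
   reg 4.  */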
2214 emit_group_load (dst
, orig_src
, ssize
)
2221 if (GET_CODE (dst
) != PARALLEL
)
2224 /* Check for a NULL entry, used to indicate that the parameter goes
2225 both on the stack and in registers. */
2226 if (XEXP (XVECEXP (dst
, 0, 0), 0))
2231 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
2233 /* Process the pieces. */
2234 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2236 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
2237 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
2238 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2241 /* Handle trailing fragments that run over the size of the struct. */
2242 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2244 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2245 bytelen
= ssize
- bytepos
;
2250 /* If we won't be loading directly from memory, protect the real source
2251 from strange tricks we might play; but make sure that the source can
2252 be loaded directly into the destination. */
2254 if (GET_CODE (orig_src
) != MEM
2255 && (!CONSTANT_P (orig_src
)
2256 || (GET_MODE (orig_src
) != mode
2257 && GET_MODE (orig_src
) != VOIDmode
)))
2259 if (GET_MODE (orig_src
) == VOIDmode
)
2260 src
= gen_reg_rtx (mode
);
2262 src
= gen_reg_rtx (GET_MODE (orig_src
));
2264 emit_move_insn (src
, orig_src
);
2267 /* Optimize the access just a bit. */
2268 if (GET_CODE (src
) == MEM
2269 && MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
)
2270 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2271 && bytelen
== GET_MODE_SIZE (mode
))
2273 tmps
[i
] = gen_reg_rtx (mode
);
2274 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2276 else if (GET_CODE (src
) == CONCAT
)
2278 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
2279 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
2281 if ((bytepos
== 0 && bytelen
== slen0
)
2282 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
2284 /* The following assumes that the concatenated objects all
2285 have the same size. In this case, a simple calculation
2286 can be used to determine the object and the bit field
2288 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
2289 if (! CONSTANT_P (tmps
[i
])
2290 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
2291 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
2292 (bytepos
% slen0
) * BITS_PER_UNIT
,
2293 1, NULL_RTX
, mode
, mode
, ssize
);
2295 else if (bytepos
== 0)
2297 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
2298 emit_move_insn (mem
, src
);
2299 tmps
[i
] = adjust_address (mem
, mode
, 0);
2304 else if (CONSTANT_P (src
)
2305 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
2308 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2309 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2312 if (BYTES_BIG_ENDIAN
&& shift
)
2313 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2314 tmps
[i
], 0, OPTAB_WIDEN
);
2319 /* Copy the extracted pieces into the proper (probable) hard regs. */
2320 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2321 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
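
/* Illustrative sketch (not code from this file): a backend that returns a
   16-byte structure in two hypothetical 8-byte hard registers 3 and 4 would
   describe the destination as a PARALLEL and let emit_group_load pull the
   pieces out of memory:

     rtx dst = gen_rtx_PARALLEL
       (BLKmode,
        gen_rtvec (2,
                   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 3),
                                      GEN_INT (0)),
                   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 4),
                                      GEN_INT (8))));
     emit_group_load (dst, src_mem, 16);

   Here src_mem is assumed to be a BLKmode MEM for the structure; the
   register numbers and modes are made up for the example.  */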
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
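
/* Worked example for the trailing-fragment handling above: storing an
   SImode piece (bytelen == 4) at bytepos == 4 of a destination whose ssize
   is 6 overruns the block by two bytes.  On a BYTES_BIG_ENDIAN target the
   piece is first shifted right by (4 - (6 - 4)) * BITS_PER_UNIT = 16 bits
   and bytelen is clipped to 2, so only the two significant bytes are
   stored.  */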
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
        srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
        srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
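
/* Worked example for the big-endian correction above: for a 6-byte
   structure on a target with 4-byte words and a 32-bit BITS_PER_WORD,
   bytes % UNITS_PER_WORD is 2, so big_endian_correction is
   32 - 2 * 8 = 16.  The first extraction therefore starts 16 bits into
   the source word, skipping the unused high-order bytes.  */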
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
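
/* For illustration: after use_regs (&call_fusage, 3, 2) the fusage chain
   looks like

     (expr_list (use (reg 4)) (expr_list (use (reg 3)) (nil)))

   i.e. one USE per hard register, consed onto the front of the list.  The
   register numbers are only an example.  */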
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
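
/* Illustrative sketch (hypothetical callback, not code from this file):
   a caller with a known constant fill pattern would pair the two routines
   like so:

     static rtx
     my_constfun (data, offset, mode)
          PTR data ATTRIBUTE_UNUSED;
          HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
          enum machine_mode mode;
     {
       return CONST0_RTX (mode);
     }

     ...
     if (can_store_by_pieces (len, my_constfun, NULL, align))
       store_by_pieces (dest_mem, len, my_constfun, NULL, align);

   my_constfun, dest_mem, len and align are placeholders for the example;
   clear_by_pieces below does essentially this for the all-zeros case.  */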
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
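
/* Worked example of the mode walk above: with data->len == 11 on a target
   whose widest usable integer move is 8 bytes, the first pass picks the
   8-byte mode and store_by_pieces_2 emits one 8-byte store (3 bytes left);
   later passes pick a 4-byte mode (too wide, nothing emitted), then a
   2-byte mode (one store) and finally a 1-byte mode (one store), leaving
   data->len at zero.  The widths are only an illustration; the actual
   choice depends on STORE_MAX_PIECES and the available move patterns.  */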
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
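
/* For illustration: clearing a 24-byte BLKmode temporary TEMP would be
   requested as

     clear_storage (temp, GEN_INT (24));

   and, depending on CLEAR_BY_PIECES_P and the available clrstr pattern,
   ends up in clear_by_pieces, a clrstrM expansion, or the memset/bzero
   libcall below.  TEMP is a placeholder for the example.  */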
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (object, size, align)
     rtx object, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (object, size)
     rtx object, size;
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
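
/* On a target with TARGET_MEM_FUNCTIONS the tree built above is simply
   the equivalent of the C call

     memset (object, 0, size);

   while a bzero target gets bzero (object, size); in both cases the
   arguments have already been copied into fresh pseudos, as explained
   above.  */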
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

static tree
clear_storage_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;
  tree fn = block_clear_fn, args;

  if (!fn)
    {
      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memset");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           integer_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bzero");
          args = build_function_type_list (void_type_node, ptr_type_node,
                                           unsigned_type_node, NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);
    }

  return fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
          && FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y_cst = y;
          y = force_const_mem (mode, y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */
3136 emit_move_insn_1 (x
, y
)
3139 enum machine_mode mode
= GET_MODE (x
);
3140 enum machine_mode submode
;
3141 enum mode_class
class = GET_MODE_CLASS (mode
);
3143 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
3146 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
3148 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
3150 /* Expand complex moves by moving real part and imag part, if possible. */
3151 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
3152 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
3154 (class == MODE_COMPLEX_INT
3155 ? MODE_INT
: MODE_FLOAT
),
3157 && (mov_optab
->handlers
[(int) submode
].insn_code
3158 != CODE_FOR_nothing
))
3160 /* Don't split destination if it is a stack push. */
3161 int stack
= push_operand (x
, GET_MODE (x
));
3163 #ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
3167 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
3168 != GET_MODE_SIZE (submode
)))
3171 HOST_WIDE_INT offset1
, offset2
;
3173 /* Do not use anti_adjust_stack, since we don't want to update
3174 stack_pointer_delta. */
3175 temp
= expand_binop (Pmode
,
3176 #ifdef STACK_GROWS_DOWNWARD
3184 (GET_MODE_SIZE (GET_MODE (x
)))),
3185 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3187 if (temp
!= stack_pointer_rtx
)
3188 emit_move_insn (stack_pointer_rtx
, temp
);
3190 #ifdef STACK_GROWS_DOWNWARD
3192 offset2
= GET_MODE_SIZE (submode
);
3194 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
3195 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
3196 + GET_MODE_SIZE (submode
));
3199 emit_move_insn (change_address (x
, submode
,
3200 gen_rtx_PLUS (Pmode
,
3202 GEN_INT (offset1
))),
3203 gen_realpart (submode
, y
));
3204 emit_move_insn (change_address (x
, submode
,
3205 gen_rtx_PLUS (Pmode
,
3207 GEN_INT (offset2
))),
3208 gen_imagpart (submode
, y
));
3212 /* If this is a stack, push the highpart first, so it
3213 will be in the argument order.
3215 In that case, change_address is used only to convert
3216 the mode, not to change the address. */
3219 /* Note that the real part always precedes the imag part in memory
3220 regardless of machine's endianness. */
3221 #ifdef STACK_GROWS_DOWNWARD
3222 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3223 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3224 gen_imagpart (submode
, y
)));
3225 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3226 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3227 gen_realpart (submode
, y
)));
3229 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3230 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3231 gen_realpart (submode
, y
)));
3232 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3233 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3234 gen_imagpart (submode
, y
)));
3239 rtx realpart_x
, realpart_y
;
3240 rtx imagpart_x
, imagpart_y
;
3242 /* If this is a complex value with each part being smaller than a
3243 word, the usual calling sequence will likely pack the pieces into
3244 a single register. Unfortunately, SUBREG of hard registers only
3245 deals in terms of words, so we have a problem converting input
3246 arguments to the CONCAT of two registers that is used elsewhere
3247 for complex values. If this is before reload, we can copy it into
3248 memory and reload. FIXME, we should see about using extract and
3249 insert on integer registers, but complex short and complex char
3250 variables should be rarely used. */
3251 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
3252 && (reload_in_progress
| reload_completed
) == 0)
3255 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
3257 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
3259 if (packed_dest_p
|| packed_src_p
)
3261 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
3262 ? MODE_FLOAT
: MODE_INT
);
3264 enum machine_mode reg_mode
3265 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
3267 if (reg_mode
!= BLKmode
)
3269 rtx mem
= assign_stack_temp (reg_mode
,
3270 GET_MODE_SIZE (mode
), 0);
3271 rtx cmem
= adjust_address (mem
, mode
, 0);
3274 = N_("function using short complex types cannot be inline");
3278 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
3280 emit_move_insn_1 (cmem
, y
);
3281 return emit_move_insn_1 (sreg
, mem
);
3285 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
3287 emit_move_insn_1 (mem
, sreg
);
3288 return emit_move_insn_1 (x
, cmem
);
3294 realpart_x
= gen_realpart (submode
, x
);
3295 realpart_y
= gen_realpart (submode
, y
);
3296 imagpart_x
= gen_imagpart (submode
, x
);
3297 imagpart_y
= gen_imagpart (submode
, y
);
3299 /* Show the output dies here. This is necessary for SUBREGs
3300 of pseudos since we cannot track their lifetimes correctly;
3301 hard regs shouldn't appear here except as return values.
3302 We never want to emit such a clobber after reload. */
3304 && ! (reload_in_progress
|| reload_completed
)
3305 && (GET_CODE (realpart_x
) == SUBREG
3306 || GET_CODE (imagpart_x
) == SUBREG
))
3307 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3309 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3310 (realpart_x
, realpart_y
));
3311 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3312 (imagpart_x
, imagpart_y
));
3315 return get_last_insn ();
3318 /* This will handle any multi-word or full-word mode that lacks a move_insn
3319 pattern. However, you will get better code if you define such patterns,
3320 even if they must turn into multiple assembler instructions. */
3321 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3328 #ifdef PUSH_ROUNDING
3330 /* If X is a push on the stack, do the push now and replace
3331 X with a reference to the stack pointer. */
3332 if (push_operand (x
, GET_MODE (x
)))
3337 /* Do not use anti_adjust_stack, since we don't want to update
3338 stack_pointer_delta. */
3339 temp
= expand_binop (Pmode
,
3340 #ifdef STACK_GROWS_DOWNWARD
3348 (GET_MODE_SIZE (GET_MODE (x
)))),
3349 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3351 if (temp
!= stack_pointer_rtx
)
3352 emit_move_insn (stack_pointer_rtx
, temp
);
3354 code
= GET_CODE (XEXP (x
, 0));
3356 /* Just hope that small offsets off SP are OK. */
3357 if (code
== POST_INC
)
3358 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3359 GEN_INT (-((HOST_WIDE_INT
)
3360 GET_MODE_SIZE (GET_MODE (x
)))));
3361 else if (code
== POST_DEC
)
3362 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3363 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3365 temp
= stack_pointer_rtx
;
3367 x
= change_address (x
, VOIDmode
, temp
);
3371 /* If we are in reload, see if either operand is a MEM whose address
3372 is scheduled for replacement. */
3373 if (reload_in_progress
&& GET_CODE (x
) == MEM
3374 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3375 x
= replace_equiv_address_nv (x
, inner
);
3376 if (reload_in_progress
&& GET_CODE (y
) == MEM
3377 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3378 y
= replace_equiv_address_nv (y
, inner
);
3384 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3387 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3388 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3390 /* If we can't get a part of Y, put Y into memory if it is a
3391 constant. Otherwise, force it into a register. If we still
3392 can't get a part of Y, abort. */
3393 if (ypart
== 0 && CONSTANT_P (y
))
3395 y
= force_const_mem (mode
, y
);
3396 ypart
= operand_subword (y
, i
, 1, mode
);
3398 else if (ypart
== 0)
3399 ypart
= operand_subword_force (y
, i
, mode
);
3401 if (xpart
== 0 || ypart
== 0)
3404 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3406 last_insn
= emit_move_insn (xpart
, ypart
);
3412 /* Show the output dies here. This is necessary for SUBREGs
3413 of pseudos since we cannot track their lifetimes correctly;
3414 hard regs shouldn't appear here except as return values.
3415 We never want to emit such a clobber after reload. */
3417 && ! (reload_in_progress
|| reload_completed
)
3418 && need_clobber
!= 0)
3419 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */
3434 compress_float_constant (x
, y
)
3437 enum machine_mode dstmode
= GET_MODE (x
);
3438 enum machine_mode orig_srcmode
= GET_MODE (y
);
3439 enum machine_mode srcmode
;
3442 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3444 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3445 srcmode
!= orig_srcmode
;
3446 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3449 rtx trunc_y
, last_insn
;
3451 /* Skip if the target can't extend this way. */
3452 ic
= can_extend_p (dstmode
, srcmode
, 0);
3453 if (ic
== CODE_FOR_nothing
)
3456 /* Skip if the narrowed value isn't exact. */
3457 if (! exact_real_truncate (srcmode
, &r
))
3460 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3462 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3464 /* Skip if the target needs extra instructions to perform
3466 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
3469 else if (float_extend_from_mem
[dstmode
][srcmode
])
3470 trunc_y
= validize_mem (force_const_mem (srcmode
, trunc_y
));
3474 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3475 last_insn
= get_last_insn ();
3477 if (GET_CODE (x
) == REG
)
3478 REG_NOTES (last_insn
)
3479 = gen_rtx_EXPR_LIST (REG_EQUAL
, y
, REG_NOTES (last_insn
));
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
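
/* Worked example, assuming a downward-growing stack: push_block
   (GEN_INT (16), 4, 0) adjusts the stack by 20 bytes and returns an address
   20 bytes below virtual_outgoing_args_rtx, so the 16-byte block sits at
   the bottom and the 4 padding bytes end up at the higher addresses, as
   described above for BELOW == 0.  */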
3499 push_block (size
, extra
, below
)
3505 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3506 if (CONSTANT_P (size
))
3507 anti_adjust_stack (plus_constant (size
, extra
));
3508 else if (GET_CODE (size
) == REG
&& extra
== 0)
3509 anti_adjust_stack (size
);
3512 temp
= copy_to_mode_reg (Pmode
, size
);
3514 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3515 temp
, 0, OPTAB_LIB_WIDEN
);
3516 anti_adjust_stack (temp
);
3519 #ifndef STACK_GROWS_DOWNWARD
3525 temp
= virtual_outgoing_args_rtx
;
3526 if (extra
!= 0 && below
)
3527 temp
= plus_constant (temp
, extra
);
3531 if (GET_CODE (size
) == CONST_INT
)
3532 temp
= plus_constant (virtual_outgoing_args_rtx
,
3533 -INTVAL (size
) - (below
? 0 : extra
));
3534 else if (extra
!= 0 && !below
)
3535 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3536 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3538 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3539 negate_rtx (Pmode
, size
));
3542 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
3545 #ifdef PUSH_ROUNDING
3547 /* Emit single push insn. */
3550 emit_single_push_insn (mode
, x
, type
)
3552 enum machine_mode mode
;
3556 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3558 enum insn_code icode
;
3559 insn_operand_predicate_fn pred
;
3561 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3562 /* If there is push pattern, use it. Otherwise try old way of throwing
3563 MEM representing push operation to move expander. */
3564 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3565 if (icode
!= CODE_FOR_nothing
)
3567 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3568 && !((*pred
) (x
, mode
))))
3569 x
= force_reg (mode
, x
);
3570 emit_insn (GEN_FCN (icode
) (x
));
3573 if (GET_MODE_SIZE (mode
) == rounded_size
)
3574 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3577 #ifdef STACK_GROWS_DOWNWARD
3578 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3579 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3581 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3582 GEN_INT (rounded_size
));
3584 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3587 dest
= gen_rtx_MEM (mode
, dest_addr
);
3591 set_mem_attributes (dest
, type
, 1);
3593 if (flag_optimize_sibling_calls
)
3594 /* Function incoming arguments may overlap with sibling call
3595 outgoing arguments and we cannot allow reordering of reads
3596 from function arguments with stores to outgoing arguments
3597 of sibling calls. */
3598 set_mem_alias_set (dest
, 0);
3600 emit_move_insn (dest
, x
);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
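
/* Example of the PARTIAL/REG convention above: with UNITS_PER_WORD == 4,
   PARTIAL == 2 and REG a hard register, the first two words of X are
   loaded into REG and REG+1 while the remainder is pushed, and the stack
   space consumed shrinks by 2 * 4 = 8 bytes (rounded down to a multiple of
   PARM_BOUNDARY).  The numbers are only illustrative.  */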
3637 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
3638 args_addr
, args_so_far
, reg_parm_stack_space
,
3641 enum machine_mode mode
;
3650 int reg_parm_stack_space
;
3654 enum direction stack_direction
3655 #ifdef STACK_GROWS_DOWNWARD
3661 /* Decide where to pad the argument: `downward' for below,
3662 `upward' for above, or `none' for don't pad it.
3663 Default is below for small data on big-endian machines; else above. */
3664 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3666 /* Invert direction if stack is post-decrement.
3668 if (STACK_PUSH_CODE
== POST_DEC
)
3669 if (where_pad
!= none
)
3670 where_pad
= (where_pad
== downward
? upward
: downward
);
3672 xinner
= x
= protect_from_queue (x
, 0);
3674 if (mode
== BLKmode
)
3676 /* Copy a block into the stack, entirely or partially. */
3679 int used
= partial
* UNITS_PER_WORD
;
3680 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3688 /* USED is now the # of bytes we need not copy to the stack
3689 because registers will take care of them. */
3692 xinner
= adjust_address (xinner
, BLKmode
, used
);
3694 /* If the partial register-part of the arg counts in its stack size,
3695 skip the part of stack space corresponding to the registers.
3696 Otherwise, start copying to the beginning of the stack space,
3697 by setting SKIP to 0. */
3698 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3700 #ifdef PUSH_ROUNDING
3701 /* Do it with several push insns if that doesn't take lots of insns
3702 and if there is no difficulty with push insns that skip bytes
3703 on the stack for alignment purposes. */
3706 && GET_CODE (size
) == CONST_INT
3708 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3709 /* Here we avoid the case of a structure whose weak alignment
3710 forces many pushes of a small amount of data,
3711 and such small pushes do rounding that causes trouble. */
3712 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3713 || align
>= BIGGEST_ALIGNMENT
3714 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3715 == (align
/ BITS_PER_UNIT
)))
3716 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3718 /* Push padding now if padding above and stack grows down,
3719 or if padding below and stack grows up.
3720 But if space already allocated, this has already been done. */
3721 if (extra
&& args_addr
== 0
3722 && where_pad
!= none
&& where_pad
!= stack_direction
)
3723 anti_adjust_stack (GEN_INT (extra
));
3725 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
);
3728 #endif /* PUSH_ROUNDING */
3732 /* Otherwise make space on the stack and copy the data
3733 to the address of that space. */
3735 /* Deduct words put into registers from the size we must copy. */
3738 if (GET_CODE (size
) == CONST_INT
)
3739 size
= GEN_INT (INTVAL (size
) - used
);
3741 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3742 GEN_INT (used
), NULL_RTX
, 0,
3746 /* Get the address of the stack space.
3747 In this case, we do not deal with EXTRA separately.
3748 A single stack adjust will do. */
3751 temp
= push_block (size
, extra
, where_pad
== downward
);
3754 else if (GET_CODE (args_so_far
) == CONST_INT
)
3755 temp
= memory_address (BLKmode
,
3756 plus_constant (args_addr
,
3757 skip
+ INTVAL (args_so_far
)));
3759 temp
= memory_address (BLKmode
,
3760 plus_constant (gen_rtx_PLUS (Pmode
,
3765 if (!ACCUMULATE_OUTGOING_ARGS
)
3767 /* If the source is referenced relative to the stack pointer,
3768 copy it to another register to stabilize it. We do not need
3769 to do this if we know that we won't be changing sp. */
3771 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3772 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3773 temp
= copy_to_reg (temp
);
3776 target
= gen_rtx_MEM (BLKmode
, temp
);
3780 set_mem_attributes (target
, type
, 1);
3781 /* Function incoming arguments may overlap with sibling call
3782 outgoing arguments and we cannot allow reordering of reads
3783 from function arguments with stores to outgoing arguments
3784 of sibling calls. */
3785 set_mem_alias_set (target
, 0);
3788 /* ALIGN may well be better aligned than TYPE, e.g. due to
3789 PARM_BOUNDARY. Assume the caller isn't lying. */
3790 set_mem_align (target
, align
);
3792 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3795 else if (partial
> 0)
3797 /* Scalar partly in registers. */
3799 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3802 /* # words of start of argument
3803 that we must make space for but need not store. */
3804 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3805 int args_offset
= INTVAL (args_so_far
);
3808 /* Push padding now if padding above and stack grows down,
3809 or if padding below and stack grows up.
3810 But if space already allocated, this has already been done. */
3811 if (extra
&& args_addr
== 0
3812 && where_pad
!= none
&& where_pad
!= stack_direction
)
3813 anti_adjust_stack (GEN_INT (extra
));
3815 /* If we make space by pushing it, we might as well push
3816 the real data. Otherwise, we can leave OFFSET nonzero
3817 and leave the space uninitialized. */
3821 /* Now NOT_STACK gets the number of words that we don't need to
3822 allocate on the stack. */
3823 not_stack
= partial
- offset
;
3825 /* If the partial register-part of the arg counts in its stack size,
3826 skip the part of stack space corresponding to the registers.
3827 Otherwise, start copying to the beginning of the stack space,
3828 by setting SKIP to 0. */
3829 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3831 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3832 x
= validize_mem (force_const_mem (mode
, x
));
3834 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3835 SUBREGs of such registers are not allowed. */
3836 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3837 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3838 x
= copy_to_reg (x
);
3840 /* Loop over all the words allocated on the stack for this arg. */
3841 /* We can do it by words, because any scalar bigger than a word
3842 has a size a multiple of a word. */
3843 #ifndef PUSH_ARGS_REVERSED
3844 for (i
= not_stack
; i
< size
; i
++)
3846 for (i
= size
- 1; i
>= not_stack
; i
--)
3848 if (i
>= not_stack
+ offset
)
3849 emit_push_insn (operand_subword_force (x
, i
, mode
),
3850 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3852 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3854 reg_parm_stack_space
, alignment_pad
);
3859 rtx target
= NULL_RTX
;
3862 /* Push padding now if padding above and stack grows down,
3863 or if padding below and stack grows up.
3864 But if space already allocated, this has already been done. */
3865 if (extra
&& args_addr
== 0
3866 && where_pad
!= none
&& where_pad
!= stack_direction
)
3867 anti_adjust_stack (GEN_INT (extra
));
3869 #ifdef PUSH_ROUNDING
3870 if (args_addr
== 0 && PUSH_ARGS
)
3871 emit_single_push_insn (mode
, x
, type
);
3875 if (GET_CODE (args_so_far
) == CONST_INT
)
3877 = memory_address (mode
,
3878 plus_constant (args_addr
,
3879 INTVAL (args_so_far
)));
3881 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3884 dest
= gen_rtx_MEM (mode
, addr
);
3887 set_mem_attributes (dest
, type
, 1);
3888 /* Function incoming arguments may overlap with sibling call
3889 outgoing arguments and we cannot allow reordering of reads
3890 from function arguments with stores to outgoing arguments
3891 of sibling calls. */
3892 set_mem_alias_set (dest
, 0);
3895 emit_move_insn (dest
, x
);
3899 /* If part should go in registers, copy that part
3900 into the appropriate registers. Do this now, at the end,
3901 since mem-to-mem copies above may do function calls. */
3902 if (partial
> 0 && reg
!= 0)
3904 /* Handle calls that pass values in multiple non-contiguous locations.
3905 The Irix 6 ABI has examples of this. */
3906 if (GET_CODE (reg
) == PARALLEL
)
3907 emit_group_load (reg
, x
, -1); /* ??? size? */
3909 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3912 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3913 anti_adjust_stack (GEN_INT (extra
));
3915 if (alignment_pad
&& args_addr
== 0)
3916 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */
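
/* For example, for an assignment to a bit-field component such as s.f = x
   where f is 3 bits wide at bit offset 5, get_inner_reference below reports
   bitsize == 3 and bitpos == 5, and the store is done with store_field
   rather than a plain move.  The numbers are only illustrative.  */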
3951 expand_assignment (to
, from
, want_value
, suggest_reg
)
3954 int suggest_reg ATTRIBUTE_UNUSED
;
3959 /* Don't crash if the lhs of the assignment was erroneous. */
3961 if (TREE_CODE (to
) == ERROR_MARK
)
3963 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3964 return want_value
? result
: NULL_RTX
;
3967 /* Assignment of a structure component needs special treatment
3968 if the structure component's rtx is not simply a MEM.
3969 Assignment of an array element at a constant index, and assignment of
3970 an array element in an unaligned packed structure field, has the same
3973 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3974 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
)
3976 enum machine_mode mode1
;
3977 HOST_WIDE_INT bitsize
, bitpos
;
3985 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3986 &unsignedp
, &volatilep
);
3988 /* If we are going to use store_bit_field and extract_bit_field,
3989 make sure to_rtx will be safe for multiple use. */
3991 if (mode1
== VOIDmode
&& want_value
)
3992 tem
= stabilize_reference (tem
);
3994 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3998 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4000 if (GET_CODE (to_rtx
) != MEM
)
4003 #ifdef POINTERS_EXTEND_UNSIGNED
4004 if (GET_MODE (offset_rtx
) != Pmode
)
4005 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4007 if (GET_MODE (offset_rtx
) != ptr_mode
)
4008 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4011 /* A constant address in TO_RTX can have VOIDmode, we must not try
4012 to call force_reg for that case. Avoid that case. */
4013 if (GET_CODE (to_rtx
) == MEM
4014 && GET_MODE (to_rtx
) == BLKmode
4015 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
4017 && (bitpos
% bitsize
) == 0
4018 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4019 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
4021 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4025 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4026 highest_pow2_factor_for_type (TREE_TYPE (to
),
4030 if (GET_CODE (to_rtx
) == MEM
)
4032 /* If the field is at offset zero, we could have been given the
4033 DECL_RTX of the parent struct. Don't munge it. */
4034 to_rtx
= shallow_copy_rtx (to_rtx
);
4036 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
4039 /* Deal with volatile and readonly fields. The former is only done
4040 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4041 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
4043 if (to_rtx
== orig_to_rtx
)
4044 to_rtx
= copy_rtx (to_rtx
);
4045 MEM_VOLATILE_P (to_rtx
) = 1;
4048 if (TREE_CODE (to
) == COMPONENT_REF
4049 && TREE_READONLY (TREE_OPERAND (to
, 1)))
4051 if (to_rtx
== orig_to_rtx
)
4052 to_rtx
= copy_rtx (to_rtx
);
4053 RTX_UNCHANGING_P (to_rtx
) = 1;
4056 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
4058 if (to_rtx
== orig_to_rtx
)
4059 to_rtx
= copy_rtx (to_rtx
);
4060 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4063 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
4065 /* Spurious cast for HPUX compiler. */
4066 ? ((enum machine_mode
)
4067 TYPE_MODE (TREE_TYPE (to
)))
4069 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
4071 preserve_temp_slots (result
);
4075 /* If the value is meaningful, convert RESULT to the proper mode.
4076 Otherwise, return nothing. */
4077 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
4078 TYPE_MODE (TREE_TYPE (from
)),
4080 TREE_UNSIGNED (TREE_TYPE (to
)))
4084 /* If the rhs is a function call and its value is not an aggregate,
4085 call the function before we start to compute the lhs.
4086 This is needed for correct code for cases such as
4087 val = setjmp (buf) on machines where reference to val
4088 requires loading up part of an address in a separate insn.
4090 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4091 since it might be a promoted variable where the zero- or sign- extension
4092 needs to be done. Handling this in the normal way is safe because no
4093 computation is done before the call. */
4094 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
4095 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
4096 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
4097 && GET_CODE (DECL_RTL (to
)) == REG
))
4102 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4104 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4106 /* Handle calls that return values in multiple non-contiguous locations.
4107 The Irix 6 ABI has examples of this. */
4108 if (GET_CODE (to_rtx
) == PARALLEL
)
4109 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)));
4110 else if (GET_MODE (to_rtx
) == BLKmode
)
4111 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
4114 #ifdef POINTERS_EXTEND_UNSIGNED
4115 if (POINTER_TYPE_P (TREE_TYPE (to
))
4116 && GET_MODE (to_rtx
) != GET_MODE (value
))
4117 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
4119 emit_move_insn (to_rtx
, value
);
4121 preserve_temp_slots (to_rtx
);
4124 return want_value
? to_rtx
: NULL_RTX
;
4127 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4128 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4131 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4133 /* Don't move directly into a return register. */
4134 if (TREE_CODE (to
) == RESULT_DECL
4135 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
4140 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
4142 if (GET_CODE (to_rtx
) == PARALLEL
)
4143 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)));
4145 emit_move_insn (to_rtx
, temp
);
4147 preserve_temp_slots (to_rtx
);
4150 return want_value
? to_rtx
: NULL_RTX
;
4153 /* In case we are returning the contents of an object which overlaps
4154 the place the value is being stored, use a safe function when copying
4155 a value through a pointer into a structure value return block. */
4156 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
4157 && current_function_returns_struct
4158 && !current_function_returns_pcc_struct
)
4163 size
= expr_size (from
);
4164 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4166 if (TARGET_MEM_FUNCTIONS
)
4167 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
4168 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
4169 XEXP (from_rtx
, 0), Pmode
,
4170 convert_to_mode (TYPE_MODE (sizetype
),
4171 size
, TREE_UNSIGNED (sizetype
)),
4172 TYPE_MODE (sizetype
));
4174 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
4175 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
4176 XEXP (to_rtx
, 0), Pmode
,
4177 convert_to_mode (TYPE_MODE (integer_type_node
),
4179 TREE_UNSIGNED (integer_type_node
)),
4180 TYPE_MODE (integer_type_node
));
4182 preserve_temp_slots (to_rtx
);
4185 return want_value
? to_rtx
: NULL_RTX
;
4188 /* Compute FROM and store the value in the rtx we got. */
4191 result
= store_expr (from
, to_rtx
, want_value
);
4192 preserve_temp_slots (result
);
4195 return want_value
? result
: NULL_RTX
;
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */
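
/* WANT_VALUE matters when the assignment is itself a subexpression, as in
   a = (b = c): expanding the inner assignment with WANT_VALUE nonzero
   yields an rtx for the stored value that the outer assignment can reuse
   without re-reading b.  */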
4222 store_expr (exp
, target
, want_value
)
4228 int dont_return_target
= 0;
4229 int dont_store_target
= 0;
4231 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4233 /* Perform first part of compound expression, then assign from second
4235 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
4237 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4239 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4241 /* For conditional expression, get safe form of the target. Then
4242 test the condition, doing the appropriate assignment on either
4243 side. This avoids the creation of unnecessary temporaries.
4244 For non-BLKmode, it is more efficient not to do this. */
4246 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4249 target
= protect_from_queue (target
, 1);
4251 do_pending_stack_adjust ();
4253 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4254 start_cleanup_deferral ();
4255 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
4256 end_cleanup_deferral ();
4258 emit_jump_insn (gen_jump (lab2
));
4261 start_cleanup_deferral ();
4262 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
4263 end_cleanup_deferral ();
4268 return want_value
? target
: NULL_RTX
;
4270 else if (queued_subexp_p (target
))
4271 /* If target contains a postincrement, let's not risk
4272 using it as the place to generate the rhs. */
4274 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4276 /* Expand EXP into a new pseudo. */
4277 temp
= gen_reg_rtx (GET_MODE (target
));
4278 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
4281 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
4283 /* If target is volatile, ANSI requires accessing the value
4284 *from* the target, if it is accessed. So make that happen.
4285 In no case return the target itself. */
4286 if (! MEM_VOLATILE_P (target
) && want_value
)
4287 dont_return_target
= 1;
4289 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
4290 && GET_MODE (target
) != BLKmode
)
4291 /* If target is in memory and caller wants value in a register instead,
4292 arrange that. Pass TARGET as target for expand_expr so that,
4293 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4294 We know expand_expr will not use the target in that case.
4295 Don't do this if TARGET is volatile because we are supposed
4296 to write it and then read it. */
4298 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4299 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4301 /* If TEMP is already in the desired TARGET, only copy it from
4302 memory and don't store it there again. */
4304 || (rtx_equal_p (temp
, target
)
4305 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4306 dont_store_target
= 1;
4307 temp
= copy_to_reg (temp
);
4309 dont_return_target
= 1;
4311 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
      /* If this is a scalar in a register that is stored in a wider mode
	 than the declared mode, compute the result into its declared mode
	 and then convert to the wider mode.  Our value is the computed
	 expression.  */

      rtx inner_target = 0;
4319 /* If we don't want a value, we can do the conversion inside EXP,
4320 which will often result in some optimizations. Do the conversion
4321 in two steps: first change the signedness, if needed, then
4322 the extend. But don't do this if the type of EXP is a subtype
4323 of something else since then the conversion might involve
4324 more than just converting modes. */
4325 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4326 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4328 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4329 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4331 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4332 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4334 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4335 (GET_MODE (SUBREG_REG (target
)),
4336 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4339 inner_target
= SUBREG_REG (target
);
4342 temp
= expand_expr (exp
, inner_target
, VOIDmode
, 0);
4344 /* If TEMP is a volatile MEM and we want a result value, make
4345 the access now so it gets done only once. Likewise if
4346 it contains TARGET. */
4347 if (GET_CODE (temp
) == MEM
&& want_value
4348 && (MEM_VOLATILE_P (temp
)
4349 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
4350 temp
= copy_to_reg (temp
);
4352 /* If TEMP is a VOIDmode constant, use convert_modes to make
4353 sure that we properly convert it. */
4354 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4356 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4357 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4358 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4359 GET_MODE (target
), temp
,
4360 SUBREG_PROMOTED_UNSIGNED_P (target
));
4363 convert_move (SUBREG_REG (target
), temp
,
4364 SUBREG_PROMOTED_UNSIGNED_P (target
));
4366 /* If we promoted a constant, change the mode back down to match
4367 target. Otherwise, the caller might get confused by a result whose
4368 mode is larger than expected. */
4370 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
))
4372 if (GET_MODE (temp
) != VOIDmode
)
4374 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4375 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4376 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4377 SUBREG_PROMOTED_UNSIGNED_P (target
));
4380 temp
= convert_modes (GET_MODE (target
),
4381 GET_MODE (SUBREG_REG (target
)),
4382 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4385 return want_value
? temp
: NULL_RTX
;
4389 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4390 /* Return TARGET if it's a specified hardware register.
4391 If TARGET is a volatile mem ref, either return TARGET
4392 or return a reg copied *from* TARGET; ANSI requires this.
4394 Otherwise, if TEMP is not TARGET, return TEMP
4395 if it is constant (for efficiency),
4396 or if we really want the correct value. */
4397 if (!(target
&& GET_CODE (target
) == REG
4398 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4399 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4400 && ! rtx_equal_p (temp
, target
)
4401 && (CONSTANT_P (temp
) || want_value
))
4402 dont_return_target
= 1;
4405 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4406 the same as that of TARGET, adjust the constant. This is needed, for
4407 example, in case it is a CONST_DOUBLE and we want only a word-sized
4409 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4410 && TREE_CODE (exp
) != ERROR_MARK
4411 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4412 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4413 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */
4429 if ((! rtx_equal_p (temp
, target
)
4430 || (temp
!= target
&& (side_effects_p (temp
)
4431 || side_effects_p (target
))))
4432 && TREE_CODE (exp
) != ERROR_MARK
4433 && ! dont_store_target
4434 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4435 but TARGET is not valid memory reference, TEMP will differ
4436 from TARGET although it is really the same location. */
4437 && (TREE_CODE_CLASS (TREE_CODE (exp
)) != 'd'
4438 || target
!= DECL_RTL_IF_SET (exp
))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
	 unless necessary, because some front ends' (C++) expr_size hook
	 aborts on objects that are not supposed to be bit-copied or
	 copied otherwise.  */
      && expr_size (exp) != const0_rtx)
    {
4445 target
= protect_from_queue (target
, 1);
4446 if (GET_MODE (temp
) != GET_MODE (target
)
4447 && GET_MODE (temp
) != VOIDmode
)
4449 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4450 if (dont_return_target
)
4452 /* In this case, we will return TEMP,
4453 so make sure it has the proper mode.
4454 But don't forget to store the value into TARGET. */
4455 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4456 emit_move_insn (target
, temp
);
4459 convert_move (target
, temp
, unsignedp
);
4462 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4464 /* Handle copying a string constant into an array. The string
4465 constant may be shorter than the array. So copy just the string's
4466 actual length, and clear the rest. First get the size of the data
4467 type of the string, which is actually the size of the target. */
4468 rtx size
= expr_size (exp
);
4470 if (GET_CODE (size
) == CONST_INT
4471 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4472 emit_block_move (target
, temp
, size
, BLOCK_OP_NORMAL
);
4475 /* Compute the size of the data to copy from the string. */
4477 = size_binop (MIN_EXPR
,
4478 make_tree (sizetype
, size
),
4479 size_int (TREE_STRING_LENGTH (exp
)));
4480 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
4484 /* Copy that much. */
4485 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
, 0);
4486 emit_block_move (target
, temp
, copy_size_rtx
, BLOCK_OP_NORMAL
);
4488 /* Figure out how much is left in TARGET that we have to clear.
4489 Do all calculations in ptr_mode. */
4490 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4492 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4493 target
= adjust_address (target
, BLKmode
,
4494 INTVAL (copy_size_rtx
));
4498 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4499 copy_size_rtx
, NULL_RTX
, 0,
4502 #ifdef POINTERS_EXTEND_UNSIGNED
4503 if (GET_MODE (copy_size_rtx
) != Pmode
)
4504 copy_size_rtx
= convert_memory_address (Pmode
,
4508 target
= offset_address (target
, copy_size_rtx
,
4509 highest_pow2_factor (copy_size
));
4510 label
= gen_label_rtx ();
4511 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4512 GET_MODE (size
), 0, label
);
4515 if (size
!= const0_rtx
)
4516 clear_storage (target
, size
);
4522 /* Handle calls that return values in multiple non-contiguous locations.
4523 The Irix 6 ABI has examples of this. */
4524 else if (GET_CODE (target
) == PARALLEL
)
4525 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)));
4526 else if (GET_MODE (temp
) == BLKmode
)
4527 emit_block_move (target
, temp
, expr_size (exp
), BLOCK_OP_NORMAL
);
4529 emit_move_insn (target
, temp
);
4532 /* If we don't want a value, return NULL_RTX. */
4536 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4537 ??? The latter test doesn't seem to make sense. */
4538 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4541 /* Return TARGET itself if it is a hard register. */
4542 else if (want_value
&& GET_MODE (target
) != BLKmode
4543 && ! (GET_CODE (target
) == REG
4544 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4545 return copy_to_reg (target
);
4551 /* Return 1 if EXP just contains zeros. */
4559 switch (TREE_CODE (exp
))
4563 case NON_LVALUE_EXPR
:
4564 case VIEW_CONVERT_EXPR
:
4565 return is_zeros_p (TREE_OPERAND (exp
, 0));
4568 return integer_zerop (exp
);
4572 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4575 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
4578 for (elt
= TREE_VECTOR_CST_ELTS (exp
); elt
;
4579 elt
= TREE_CHAIN (elt
))
4580 if (!is_zeros_p (TREE_VALUE (elt
)))
4586 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4587 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4588 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4589 if (! is_zeros_p (TREE_VALUE (elt
)))
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
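
/* Illustrative sketch, not part of GCC: the density test used by
   mostly_zeros_p above, shown in isolation.  ZEROS and ELTS are
   hypothetical element counts.  Disabled; for exposition only.  */
#if 0
static int
example_mostly_zeros (zeros, elts)
     HOST_WIDE_INT zeros, elts;
{
  /* True when at least three quarters of the elements are zero;
     written as a cross-multiplication to avoid division.  */
  return 4 * zeros >= 3 * elts;
}
#endif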

/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
4644 store_constructor_field (target
, bitsize
, bitpos
, mode
, exp
, type
, cleared
,
4647 unsigned HOST_WIDE_INT bitsize
;
4648 HOST_WIDE_INT bitpos
;
4649 enum machine_mode mode
;
4654 if (TREE_CODE (exp
) == CONSTRUCTOR
4655 && bitpos
% BITS_PER_UNIT
== 0
4656 /* If we have a nonzero bitpos for a register target, then we just
4657 let store_field do the bitfield handling. This is unlikely to
4658 generate unnecessary clear instructions anyways. */
4659 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4661 if (GET_CODE (target
) == MEM
)
4663 = adjust_address (target
,
4664 GET_MODE (target
) == BLKmode
4666 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4667 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4670 /* Update the alias set, if required. */
4671 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4672 && MEM_ALIAS_SET (target
) != 0)
4674 target
= copy_rtx (target
);
4675 set_mem_alias_set (target
, alias_set
);
4678 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		 alias_set);
}

/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
4694 store_constructor (exp
, target
, cleared
, size
)
4700 tree type
= TREE_TYPE (exp
);
4701 #ifdef WORD_REGISTER_OPERATIONS
4702 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4705 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4706 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4710 /* We either clear the aggregate or indicate the value is dead. */
4711 if ((TREE_CODE (type
) == UNION_TYPE
4712 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4714 && ! CONSTRUCTOR_ELTS (exp
))
4715 /* If the constructor is empty, clear the union. */
4717 clear_storage (target
, expr_size (exp
));
4721 /* If we are building a static constructor into a register,
4722 set the initial value as zero so we can fold the value into
4723 a constant. But if more than one register is involved,
4724 this probably loses. */
4725 else if (! cleared
&& GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4726 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4728 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4732 /* If the constructor has fewer fields than the structure
4733 or if we are initializing the structure to mostly zeros,
4734 clear the whole structure first. Don't do this if TARGET is a
4735 register whose mode size isn't equal to SIZE since clear_storage
4736 can't handle this case. */
4737 else if (! cleared
&& size
> 0
4738 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4739 != fields_length (type
))
4740 || mostly_zeros_p (exp
))
4741 && (GET_CODE (target
) != REG
4742 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4745 clear_storage (target
, GEN_INT (size
));
4750 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4752 /* Store each element of the constructor into
4753 the corresponding field of TARGET. */
4755 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4757 tree field
= TREE_PURPOSE (elt
);
4758 tree value
= TREE_VALUE (elt
);
4759 enum machine_mode mode
;
4760 HOST_WIDE_INT bitsize
;
4761 HOST_WIDE_INT bitpos
= 0;
4764 rtx to_rtx
= target
;
4766 /* Just ignore missing fields.
4767 We cleared the whole structure, above,
4768 if any fields are missing. */
4772 if (cleared
&& is_zeros_p (value
))
4775 if (host_integerp (DECL_SIZE (field
), 1))
4776 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4780 unsignedp
= TREE_UNSIGNED (field
);
4781 mode
= DECL_MODE (field
);
4782 if (DECL_BIT_FIELD (field
))
4785 offset
= DECL_FIELD_OFFSET (field
);
4786 if (host_integerp (offset
, 0)
4787 && host_integerp (bit_position (field
), 0))
4789 bitpos
= int_bit_position (field
);
4793 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4799 if (contains_placeholder_p (offset
))
4800 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4801 offset
, make_tree (TREE_TYPE (exp
), target
));
4803 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4804 if (GET_CODE (to_rtx
) != MEM
)
4807 #ifdef POINTERS_EXTEND_UNSIGNED
4808 if (GET_MODE (offset_rtx
) != Pmode
)
4809 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4811 if (GET_MODE (offset_rtx
) != ptr_mode
)
4812 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4815 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4816 highest_pow2_factor (offset
));
4819 if (TREE_READONLY (field
))
4821 if (GET_CODE (to_rtx
) == MEM
)
4822 to_rtx
= copy_rtx (to_rtx
);
4824 RTX_UNCHANGING_P (to_rtx
) = 1;
4827 #ifdef WORD_REGISTER_OPERATIONS
4828 /* If this initializes a field that is smaller than a word, at the
4829 start of a word, try to widen it to a full word.
4830 This special case allows us to output C++ member function
4831 initializations in a form that the optimizers can understand. */
4832 if (GET_CODE (target
) == REG
4833 && bitsize
< BITS_PER_WORD
4834 && bitpos
% BITS_PER_WORD
== 0
4835 && GET_MODE_CLASS (mode
) == MODE_INT
4836 && TREE_CODE (value
) == INTEGER_CST
4838 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4840 tree type
= TREE_TYPE (value
);
4842 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4844 type
= (*lang_hooks
.types
.type_for_size
)
4845 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4846 value
= convert (type
, value
);
4849 if (BYTES_BIG_ENDIAN
)
4851 = fold (build (LSHIFT_EXPR
, type
, value
,
4852 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4853 bitsize
= BITS_PER_WORD
;
4858 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4859 && DECL_NONADDRESSABLE_P (field
))
4861 to_rtx
= copy_rtx (to_rtx
);
4862 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4865 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4866 value
, type
, cleared
,
4867 get_alias_set (TREE_TYPE (field
)));
4870 else if (TREE_CODE (type
) == ARRAY_TYPE
4871 || TREE_CODE (type
) == VECTOR_TYPE
)
4876 tree domain
= TYPE_DOMAIN (type
);
4877 tree elttype
= TREE_TYPE (type
);
4879 HOST_WIDE_INT minelt
= 0;
4880 HOST_WIDE_INT maxelt
= 0;
      /* Vectors are like arrays, but the domain is stored via an array
	 type indirectly.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	{
	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
	     the same field as TYPE_DOMAIN, we are not guaranteed that
	     it always will.  */
	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4890 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4893 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4894 && TYPE_MAX_VALUE (domain
)
4895 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4896 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4898 /* If we have constant bounds for the range of the type, get them. */
4901 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4902 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
4908 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4912 HOST_WIDE_INT count
= 0, zero_count
= 0;
4913 need_to_clear
= ! const_bounds_p
;
4915 /* This loop is a more accurate version of the loop in
4916 mostly_zeros_p (it handles RANGE_EXPR in an index).
4917 It is also needed to check for missing elements. */
4918 for (elt
= CONSTRUCTOR_ELTS (exp
);
4919 elt
!= NULL_TREE
&& ! need_to_clear
;
4920 elt
= TREE_CHAIN (elt
))
4922 tree index
= TREE_PURPOSE (elt
);
4923 HOST_WIDE_INT this_node_count
;
4925 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4927 tree lo_index
= TREE_OPERAND (index
, 0);
4928 tree hi_index
= TREE_OPERAND (index
, 1);
4930 if (! host_integerp (lo_index
, 1)
4931 || ! host_integerp (hi_index
, 1))
4937 this_node_count
= (tree_low_cst (hi_index
, 1)
4938 - tree_low_cst (lo_index
, 1) + 1);
4941 this_node_count
= 1;
4943 count
+= this_node_count
;
4944 if (mostly_zeros_p (TREE_VALUE (elt
)))
4945 zero_count
+= this_node_count
;
4948 /* Clear the entire array first if there are any missing elements,
4949 or if the incidence of zero elements is >= 75%. */
4951 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4955 if (need_to_clear
&& size
> 0)
4960 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4962 clear_storage (target
, GEN_INT (size
));
4966 else if (REG_P (target
))
4967 /* Inform later passes that the old value is dead. */
4968 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4970 /* Store each element of the constructor into
4971 the corresponding element of TARGET, determined
4972 by counting the elements. */
4973 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4975 elt
= TREE_CHAIN (elt
), i
++)
4977 enum machine_mode mode
;
4978 HOST_WIDE_INT bitsize
;
4979 HOST_WIDE_INT bitpos
;
4981 tree value
= TREE_VALUE (elt
);
4982 tree index
= TREE_PURPOSE (elt
);
4983 rtx xtarget
= target
;
4985 if (cleared
&& is_zeros_p (value
))
4988 unsignedp
= TREE_UNSIGNED (elttype
);
4989 mode
= TYPE_MODE (elttype
);
4990 if (mode
== BLKmode
)
4991 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4992 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4995 bitsize
= GET_MODE_BITSIZE (mode
);
4997 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4999 tree lo_index
= TREE_OPERAND (index
, 0);
5000 tree hi_index
= TREE_OPERAND (index
, 1);
5001 rtx index_r
, pos_rtx
, hi_r
, loop_top
, loop_end
;
5002 struct nesting
*loop
;
5003 HOST_WIDE_INT lo
, hi
, count
;
5006 /* If the range is constant and "small", unroll the loop. */
5008 && host_integerp (lo_index
, 0)
5009 && host_integerp (hi_index
, 0)
5010 && (lo
= tree_low_cst (lo_index
, 0),
5011 hi
= tree_low_cst (hi_index
, 0),
5012 count
= hi
- lo
+ 1,
5013 (GET_CODE (target
) != MEM
5015 || (host_integerp (TYPE_SIZE (elttype
), 1)
5016 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
5019 lo
-= minelt
; hi
-= minelt
;
5020 for (; lo
<= hi
; lo
++)
5022 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
5024 if (GET_CODE (target
) == MEM
5025 && !MEM_KEEP_ALIAS_SET_P (target
)
5026 && TREE_CODE (type
) == ARRAY_TYPE
5027 && TYPE_NONALIASED_COMPONENT (type
))
5029 target
= copy_rtx (target
);
5030 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5033 store_constructor_field
5034 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
5035 get_alias_set (elttype
));
5040 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
5041 loop_top
= gen_label_rtx ();
5042 loop_end
= gen_label_rtx ();
5044 unsignedp
= TREE_UNSIGNED (domain
);
5046 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
5049 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
5051 SET_DECL_RTL (index
, index_r
);
5052 if (TREE_CODE (value
) == SAVE_EXPR
5053 && SAVE_EXPR_RTL (value
) == 0)
5055 /* Make sure value gets expanded once before the
5057 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
5060 store_expr (lo_index
, index_r
, 0);
5061 loop
= expand_start_loop (0);
5063 /* Assign value to element index. */
5065 = convert (ssizetype
,
5066 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5067 index
, TYPE_MIN_VALUE (domain
))));
5068 position
= size_binop (MULT_EXPR
, position
,
5070 TYPE_SIZE_UNIT (elttype
)));
5072 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
5073 xtarget
= offset_address (target
, pos_rtx
,
5074 highest_pow2_factor (position
));
5075 xtarget
= adjust_address (xtarget
, mode
, 0);
5076 if (TREE_CODE (value
) == CONSTRUCTOR
)
5077 store_constructor (value
, xtarget
, cleared
,
5078 bitsize
/ BITS_PER_UNIT
);
5080 store_expr (value
, xtarget
, 0);
5082 expand_exit_loop_if_false (loop
,
5083 build (LT_EXPR
, integer_type_node
,
5086 expand_increment (build (PREINCREMENT_EXPR
,
5088 index
, integer_one_node
), 0, 0);
5090 emit_label (loop_end
);
5093 else if ((index
!= 0 && ! host_integerp (index
, 0))
5094 || ! host_integerp (TYPE_SIZE (elttype
), 1))
5099 index
= ssize_int (1);
5102 index
= convert (ssizetype
,
5103 fold (build (MINUS_EXPR
, index
,
5104 TYPE_MIN_VALUE (domain
))));
5106 position
= size_binop (MULT_EXPR
, index
,
5108 TYPE_SIZE_UNIT (elttype
)));
5109 xtarget
= offset_address (target
,
5110 expand_expr (position
, 0, VOIDmode
, 0),
5111 highest_pow2_factor (position
));
5112 xtarget
= adjust_address (xtarget
, mode
, 0);
5113 store_expr (value
, xtarget
, 0);
5118 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
5119 * tree_low_cst (TYPE_SIZE (elttype
), 1));
5121 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
5123 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
5124 && TREE_CODE (type
) == ARRAY_TYPE
5125 && TYPE_NONALIASED_COMPONENT (type
))
5127 target
= copy_rtx (target
);
5128 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5131 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
5132 type
, cleared
, get_alias_set (elttype
));
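
/* Illustrative sketch, not part of GCC: the bit position computed above
   for an array element with a constant index.  INDEX, MINELT and
   ELT_BITS are hypothetical stand-ins for the locals used in the loop.
   Disabled; for exposition only.  */
#if 0
static HOST_WIDE_INT
example_array_elt_bitpos (index, minelt, elt_bits)
     HOST_WIDE_INT index, minelt, elt_bits;
{
  /* Elements are laid out contiguously starting at the low bound of
     the array's domain.  */
  return (index - minelt) * elt_bits;
}
#endif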
5138 /* Set constructor assignments. */
5139 else if (TREE_CODE (type
) == SET_TYPE
)
5141 tree elt
= CONSTRUCTOR_ELTS (exp
);
5142 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
5143 tree domain
= TYPE_DOMAIN (type
);
5144 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset), and then
	 set the bits we want.  */
5156 /* Check for all zeros. */
5157 if (elt
== NULL_TREE
&& size
> 0)
5160 clear_storage (target
, GEN_INT (size
));
5164 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
5165 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
5166 bitlength
= size_binop (PLUS_EXPR
,
5167 size_diffop (domain_max
, domain_min
),
5170 nbits
= tree_low_cst (bitlength
, 1);
5172 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5173 are "complicated" (more than one range), initialize (the
5174 constant parts) by copying from a constant. */
5175 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5176 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5178 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5179 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5180 char *bit_buffer
= (char *) alloca (nbits
);
5181 HOST_WIDE_INT word
= 0;
5182 unsigned int bit_pos
= 0;
5183 unsigned int ibit
= 0;
5184 unsigned int offset
= 0; /* In bytes from beginning of set. */
5186 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5189 if (bit_buffer
[ibit
])
5191 if (BYTES_BIG_ENDIAN
)
5192 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5194 word
|= 1 << bit_pos
;
5198 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5200 if (word
!= 0 || ! cleared
)
5202 rtx datum
= GEN_INT (word
);
5205 /* The assumption here is that it is safe to use
5206 XEXP if the set is multi-word, but not if
5207 it's single-word. */
5208 if (GET_CODE (target
) == MEM
)
5209 to_rtx
= adjust_address (target
, mode
, offset
);
5210 else if (offset
== 0)
5214 emit_move_insn (to_rtx
, datum
);
5221 offset
+= set_word_size
/ BITS_PER_UNIT
;
5226 /* Don't bother clearing storage if the set is all ones. */
5227 if (TREE_CHAIN (elt
) != NULL_TREE
5228 || (TREE_PURPOSE (elt
) == NULL_TREE
5230 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5231 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5232 || (tree_low_cst (TREE_VALUE (elt
), 0)
5233 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5234 != (HOST_WIDE_INT
) nbits
))))
5235 clear_storage (target
, expr_size (exp
));
5237 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5239 /* Start of range of element or NULL. */
5240 tree startbit
= TREE_PURPOSE (elt
);
5241 /* End of range of element, or element value. */
5242 tree endbit
= TREE_VALUE (elt
);
5243 HOST_WIDE_INT startb
, endb
;
5244 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5246 bitlength_rtx
= expand_expr (bitlength
,
5247 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5249 /* Handle non-range tuple element like [ expr ]. */
5250 if (startbit
== NULL_TREE
)
5252 startbit
= save_expr (endbit
);
5256 startbit
= convert (sizetype
, startbit
);
5257 endbit
= convert (sizetype
, endbit
);
5258 if (! integer_zerop (domain_min
))
5260 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5261 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5263 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5264 EXPAND_CONST_ADDRESS
);
5265 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5266 EXPAND_CONST_ADDRESS
);
5272 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
5273 (GET_MODE (target
), 0),
5276 emit_move_insn (targetx
, target
);
5279 else if (GET_CODE (target
) == MEM
)
5284 /* Optimization: If startbit and endbit are constants divisible
5285 by BITS_PER_UNIT, call memset instead. */
5286 if (TARGET_MEM_FUNCTIONS
5287 && TREE_CODE (startbit
) == INTEGER_CST
5288 && TREE_CODE (endbit
) == INTEGER_CST
5289 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5290 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5292 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5294 plus_constant (XEXP (targetx
, 0),
5295 startb
/ BITS_PER_UNIT
),
5297 constm1_rtx
, TYPE_MODE (integer_type_node
),
5298 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5299 TYPE_MODE (sizetype
));
5302 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
5303 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
5304 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5305 startbit_rtx
, TYPE_MODE (sizetype
),
5306 endbit_rtx
, TYPE_MODE (sizetype
));
5309 emit_move_insn (target
, targetx
);
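
/* Illustrative sketch, not part of GCC: how the set-constructor code
   above packs constant bits into host words before storing them.
   BIT_BUFFER and SET_WORD_SIZE are hypothetical stand-ins for the
   locals used above.  Disabled; for exposition only.  */
#if 0
static HOST_WIDE_INT
example_pack_set_word (bit_buffer, set_word_size)
     const char *bit_buffer;
     unsigned int set_word_size;
{
  HOST_WIDE_INT word = 0;
  unsigned int bit_pos;

  for (bit_pos = 0; bit_pos < set_word_size; bit_pos++)
    if (bit_buffer[bit_pos])
      {
	if (BYTES_BIG_ENDIAN)
	  word |= (HOST_WIDE_INT) 1 << (set_word_size - 1 - bit_pos);
	else
	  word |= (HOST_WIDE_INT) 1 << bit_pos;
      }
  return word;
}
#endif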

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
5336 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
, unsignedp
, type
,
5339 HOST_WIDE_INT bitsize
;
5340 HOST_WIDE_INT bitpos
;
5341 enum machine_mode mode
;
5343 enum machine_mode value_mode
;
5348 HOST_WIDE_INT width_mask
= 0;
5350 if (TREE_CODE (exp
) == ERROR_MARK
)
  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5360 /* If we are storing into an unaligned field of an aligned union that is
5361 in a register, we may have the mode of TARGET being an integer mode but
5362 MODE == BLKmode. In that case, get an aligned object whose size and
5363 alignment are the same as TARGET and store TARGET into it (we can avoid
5364 the store if the field being stored is the entire width of TARGET). Then
5365 call ourselves recursively to store the field into a BLKmode version of
5366 that object. Finally, load from the object into TARGET. This is not
5367 very efficient in general, but should only be slightly more expensive
5368 than the otherwise-required unaligned accesses. Perhaps this can be
5369 cleaned up later. */
5372 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5376 (build_qualified_type (type
, TYPE_QUALS (type
) | TYPE_QUAL_CONST
),
5378 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5380 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5381 emit_move_insn (object
, target
);
5383 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5386 emit_move_insn (target
, object
);
5388 /* We want to return the BLKmode version of the data. */
5392 if (GET_CODE (target
) == CONCAT
)
5394 /* We're storing into a struct containing a single __complex. */
5398 return store_expr (exp
, target
, 0);
5401 /* If the structure is in a register or if the component
5402 is a bit field, we cannot use addressing to access it.
5403 Use bit-field techniques or SUBREG to store in it. */
5405 if (mode
== VOIDmode
5406 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5407 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5408 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5409 || GET_CODE (target
) == REG
5410 || GET_CODE (target
) == SUBREG
5411 /* If the field isn't aligned enough to store as an ordinary memref,
5412 store it as a bit field. */
5413 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
5414 && (MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
)
5415 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
5423 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
5429 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5430 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5431 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5432 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5433 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5437 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5439 if (mode
!= VOIDmode
&& mode
!= BLKmode
5440 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5441 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5443 /* If the modes of TARGET and TEMP are both BLKmode, both
5444 must be in memory and BITPOS must be aligned on a byte
5445 boundary. If so, we simply do a block copy. */
5446 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5448 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5449 || bitpos
% BITS_PER_UNIT
!= 0)
5452 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5453 emit_block_move (target
, temp
,
5454 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5458 return value_mode
== VOIDmode
? const0_rtx
: target
;
5461 /* Store the value in the bitfield. */
5462 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5463 int_size_in_bytes (type
));
5465 if (value_mode
!= VOIDmode
)
5467 /* The caller wants an rtx for the value.
5468 If possible, avoid refetching from the bitfield itself. */
5470 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5473 enum machine_mode tmode
;
5475 tmode
= GET_MODE (temp
);
5476 if (tmode
== VOIDmode
)
5480 return expand_and (tmode
, temp
,
5481 gen_int_mode (width_mask
, tmode
),
5484 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5485 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5486 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5489 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5490 NULL_RTX
, value_mode
, VOIDmode
,
5491 int_size_in_bytes (type
));
5497 rtx addr
= XEXP (target
, 0);
5498 rtx to_rtx
= target
;
5500 /* If a value is wanted, it must be the lhs;
5501 so make the address stable for multiple use. */
5503 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5504 && ! CONSTANT_ADDRESS_P (addr
)
5505 /* A frame-pointer reference is already stable. */
5506 && ! (GET_CODE (addr
) == PLUS
5507 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5508 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5509 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5510 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5512 /* Now build a reference to just the desired component. */
5514 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5516 if (to_rtx
== target
)
5517 to_rtx
= copy_rtx (to_rtx
);
5519 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5520 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5521 set_mem_alias_set (to_rtx
, alias_set
);
5523 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
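
/* Illustrative sketch, not part of GCC: the mask that store_field uses
   when it has to hand back only the low BITSIZE bits of the value it
   stored into a bit-field.  BITSIZE is a hypothetical field width.
   Disabled; for exposition only.  */
#if 0
static HOST_WIDE_INT
example_width_mask (bitsize)
     HOST_WIDE_INT bitsize;
{
  if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    return ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* The field fills a whole word or more; no masking is applied.  */
  return 0;
}
#endif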

/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
5550 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5551 punsignedp
, pvolatilep
)
5553 HOST_WIDE_INT
*pbitsize
;
5554 HOST_WIDE_INT
*pbitpos
;
5556 enum machine_mode
*pmode
;
5561 enum machine_mode mode
= VOIDmode
;
5562 tree offset
= size_zero_node
;
5563 tree bit_offset
= bitsize_zero_node
;
5564 tree placeholder_ptr
= 0;
5567 /* First get the mode, signedness, and size. We do this from just the
5568 outermost expression. */
5569 if (TREE_CODE (exp
) == COMPONENT_REF
)
5571 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5572 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5573 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5575 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5577 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5579 size_tree
= TREE_OPERAND (exp
, 1);
5580 *punsignedp
= TREE_UNSIGNED (exp
);
5584 mode
= TYPE_MODE (TREE_TYPE (exp
));
5585 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5587 if (mode
== BLKmode
)
5588 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5590 *pbitsize
= GET_MODE_BITSIZE (mode
);
5595 if (! host_integerp (size_tree
, 1))
5596 mode
= BLKmode
, *pbitsize
= -1;
5598 *pbitsize
= tree_low_cst (size_tree
, 1);
5601 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5602 and find the ultimate containing object. */
5605 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5606 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5607 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5609 tree field
= TREE_OPERAND (exp
, 1);
5610 tree this_offset
= DECL_FIELD_OFFSET (field
);
5612 /* If this field hasn't been filled in yet, don't go
5613 past it. This should only happen when folding expressions
5614 made during type construction. */
5615 if (this_offset
== 0)
5617 else if (! TREE_CONSTANT (this_offset
)
5618 && contains_placeholder_p (this_offset
))
5619 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5621 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5622 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5623 DECL_FIELD_BIT_OFFSET (field
));
5625 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5628 else if (TREE_CODE (exp
) == ARRAY_REF
5629 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5631 tree index
= TREE_OPERAND (exp
, 1);
5632 tree array
= TREE_OPERAND (exp
, 0);
5633 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5634 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5635 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
	  /* We assume all arrays have sizes that are a multiple of a byte.
	     First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     element.  */
	  if (low_bound != 0 && ! integer_zerop (low_bound))
5642 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5645 /* If the index has a self-referential type, pass it to a
5646 WITH_RECORD_EXPR; if the component size is, pass our
5647 component to one. */
5648 if (! TREE_CONSTANT (index
)
5649 && contains_placeholder_p (index
))
5650 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5651 if (! TREE_CONSTANT (unit_size
)
5652 && contains_placeholder_p (unit_size
))
5653 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5655 offset
= size_binop (PLUS_EXPR
, offset
,
5656 size_binop (MULT_EXPR
,
5657 convert (sizetype
, index
),
5661 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5663 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5665 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5666 We might have been called from tree optimization where we
5667 haven't set up an object yet. */
5675 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5676 && TREE_CODE (exp
) != VIEW_CONVERT_EXPR
5677 && ! ((TREE_CODE (exp
) == NOP_EXPR
5678 || TREE_CODE (exp
) == CONVERT_EXPR
)
5679 && (TYPE_MODE (TREE_TYPE (exp
))
5680 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5683 /* If any reference in the chain is volatile, the effect is volatile. */
5684 if (TREE_THIS_VOLATILE (exp
))
5687 exp
= TREE_OPERAND (exp
, 0);
5690 /* If OFFSET is constant, see if we can return the whole thing as a
5691 constant bit position. Otherwise, split it up. */
5692 if (host_integerp (offset
, 0)
5693 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5695 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5696 && host_integerp (tem
, 0))
5697 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5699 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
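
/* Illustrative sketch, not part of GCC: how the byte OFFSET and the
   BIT_OFFSET accumulated by get_inner_reference combine into a single
   constant bit position when the offset is known at compile time.
   The argument names are hypothetical.  Disabled; for exposition only.  */
#if 0
static HOST_WIDE_INT
example_total_bitpos (byte_offset, bit_offset)
     HOST_WIDE_INT byte_offset, bit_offset;
{
  return byte_offset * BITS_PER_UNIT + bit_offset;
}
#endif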
5705 /* Return 1 if T is an expression that get_inner_reference handles. */
5708 handled_component_p (t
)
5711 switch (TREE_CODE (t
))
5716 case ARRAY_RANGE_REF
:
5717 case NON_LVALUE_EXPR
:
5718 case VIEW_CONVERT_EXPR
:
5723 return (TYPE_MODE (TREE_TYPE (t
))
5724 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
5739 force_operand (value
, target
)
5743 /* Use subtarget as the target for operand 0 of a binary operation. */
5744 rtx subtarget
= get_subtarget (target
);
5745 enum rtx_code code
= GET_CODE (value
);
5747 /* Check for a PIC address load. */
5748 if ((code
== PLUS
|| code
== MINUS
)
5749 && XEXP (value
, 0) == pic_offset_table_rtx
5750 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5751 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5752 || GET_CODE (XEXP (value
, 1)) == CONST
))
5755 subtarget
= gen_reg_rtx (GET_MODE (value
));
5756 emit_move_insn (subtarget
, value
);
5760 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5763 target
= gen_reg_rtx (GET_MODE (value
));
5764 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5765 code
== ZERO_EXTEND
);
5769 if (GET_RTX_CLASS (code
) == '2' || GET_RTX_CLASS (code
) == 'c')
5771 op2
= XEXP (value
, 1);
5772 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5774 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5777 op2
= negate_rtx (GET_MODE (value
), op2
);
5780 /* Check for an addition with OP2 a constant integer and our first
5781 operand a PLUS of a virtual register and something else. In that
5782 case, we want to emit the sum of the virtual register and the
5783 constant first and then add the other value. This allows virtual
5784 register instantiation to simply modify the constant rather than
5785 creating another one around this addition. */
5786 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5787 && GET_CODE (XEXP (value
, 0)) == PLUS
5788 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5789 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5790 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5792 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5793 XEXP (XEXP (value
, 0), 0), op2
,
5794 subtarget
, 0, OPTAB_LIB_WIDEN
);
5795 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5796 force_operand (XEXP (XEXP (value
,
5798 target
, 0, OPTAB_LIB_WIDEN
);
5801 op1
= force_operand (XEXP (value
, 0), subtarget
);
5802 op2
= force_operand (op2
, NULL_RTX
);
5806 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5808 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5809 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5810 target
, 1, OPTAB_LIB_WIDEN
);
5812 return expand_divmod (0,
5813 FLOAT_MODE_P (GET_MODE (value
))
5814 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5815 GET_MODE (value
), op1
, op2
, target
, 0);
5818 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5822 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5826 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5830 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5831 target
, 0, OPTAB_LIB_WIDEN
);
5834 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5835 target
, 1, OPTAB_LIB_WIDEN
);
5838 if (GET_RTX_CLASS (code
) == '1')
5840 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5841 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5844 #ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
5847 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5848 && (GET_MODE_SIZE (GET_MODE (value
))
5849 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5851 = simplify_gen_subreg (GET_MODE (value
),
5852 force_reg (GET_MODE (SUBREG_REG (value
)),
5853 force_operand (SUBREG_REG (value
),
5855 GET_MODE (SUBREG_REG (value
)),
5856 SUBREG_BYTE (value
));
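
/* Illustrative sketch, not part of GCC: the MINUS-to-PLUS rewrite that
   force_operand applies above when the second operand is a CONST_INT.
   The helper name is hypothetical; negate_rtx and expand_simple_binop
   are the routines used above.  Disabled; for exposition only.  */
#if 0
static rtx
example_minus_to_plus (mode, op1, op2, target)
     enum machine_mode mode;
     rtx op1, op2, target;
{
  /* x - c is emitted as x + (-c), which most targets can handle as a
     single add with an immediate operand.  */
  return expand_simple_binop (mode, PLUS, op1, negate_rtx (mode, op2),
			      target, 0, OPTAB_LIB_WIDEN);
}
#endif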

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
5871 safe_from_p (x
, exp
, top_p
)
5878 static tree save_expr_list
;
5881 /* If EXP has varying size, we MUST use a target since we currently
5882 have no way of allocating temporaries of variable size
5883 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5884 So we assume here that something at a higher level has prevented a
5885 clash. This is somewhat bogus, but the best we can do. Only
5886 do this when X is BLKmode and when we are at the top level. */
5887 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5888 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5889 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5890 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5891 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5893 && GET_MODE (x
) == BLKmode
)
5894 /* If X is in the outgoing argument area, it is always safe. */
5895 || (GET_CODE (x
) == MEM
5896 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5897 || (GET_CODE (XEXP (x
, 0)) == PLUS
5898 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5901 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5902 find the underlying pseudo. */
5903 if (GET_CODE (x
) == SUBREG
)
5906 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5910 /* A SAVE_EXPR might appear many times in the expression passed to the
5911 top-level safe_from_p call, and if it has a complex subexpression,
5912 examining it multiple times could result in a combinatorial explosion.
5913 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5914 with optimization took about 28 minutes to compile -- even though it was
5915 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5916 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5917 we have processed. Note that the only test of top_p was above. */
5926 rtn
= safe_from_p (x
, exp
, 0);
5928 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5929 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5934 /* Now look at our tree code and possibly recurse. */
5935 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5938 exp_rtl
= DECL_RTL_IF_SET (exp
);
5945 if (TREE_CODE (exp
) == TREE_LIST
)
5946 return ((TREE_VALUE (exp
) == 0
5947 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5948 && (TREE_CHAIN (exp
) == 0
5949 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5950 else if (TREE_CODE (exp
) == ERROR_MARK
)
5951 return 1; /* An already-visited SAVE_EXPR? */
5956 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5960 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5961 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5965 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5966 the expression. If it is set, we conflict iff we are that rtx or
5967 both are in memory. Otherwise, we check all operands of the
5968 expression recursively. */
5970 switch (TREE_CODE (exp
))
5973 /* If the operand is static or we are static, we can't conflict.
5974 Likewise if we don't conflict with the operand at all. */
5975 if (staticp (TREE_OPERAND (exp
, 0))
5976 || TREE_STATIC (exp
)
5977 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
	return 1;

      /* Otherwise, the only way this can conflict is if we are taking
	 the address of a DECL whose address is part of X, which is
	 very rare.  */
      exp = TREE_OPERAND (exp, 0);
      if (DECL_P (exp))
	{
5986 if (!DECL_RTL_SET_P (exp
)
5987 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5990 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5995 if (GET_CODE (x
) == MEM
5996 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5997 get_alias_set (exp
)))
6002 /* Assume that the call will clobber all hard registers and
6004 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6005 || GET_CODE (x
) == MEM
)
6010 /* If a sequence exists, we would have to scan every instruction
6011 in the sequence to see if it was safe. This is probably not
6013 if (RTL_EXPR_SEQUENCE (exp
))
6016 exp_rtl
= RTL_EXPR_RTL (exp
);
6019 case WITH_CLEANUP_EXPR
:
6020 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
6023 case CLEANUP_POINT_EXPR
:
6024 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
6027 exp_rtl
= SAVE_EXPR_RTL (exp
);
      /* If we've already scanned this, don't do it again.  Otherwise,
	 show we've scanned it and record for clearing the flag if we're
	 the outermost one.  */
      if (TREE_PRIVATE (exp))
	return 0;

      TREE_PRIVATE (exp) = 1;
6038 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
6040 TREE_PRIVATE (exp
) = 0;
6044 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
6048 /* The only operand we look at is operand 1. The rest aren't
6049 part of the expression. */
6050 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
6052 case METHOD_CALL_EXPR
:
6053 /* This takes an rtx argument, but shouldn't appear here. */
6060 /* If we have an rtx, we do not need to scan our operands. */
6064 nops
= first_rtl_op (TREE_CODE (exp
));
6065 for (i
= 0; i
< nops
; i
++)
6066 if (TREE_OPERAND (exp
, i
) != 0
6067 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
6070 /* If this is a language-specific tree code, it may require
6071 special handling. */
6072 if ((unsigned int) TREE_CODE (exp
)
6073 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6074 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
6084 exp_rtl
= SUBREG_REG (exp_rtl
);
6085 if (GET_CODE (exp_rtl
) == REG
6086 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
6090 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6091 are memory and they conflict. */
6092 return ! (rtx_equal_p (x
, exp_rtl
)
6093 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
6094 && true_dependence (exp_rtl
, VOIDmode
, x
,
6095 rtx_addr_varies_p
)));
6098 /* If we reach here, it is safe. */
6102 /* Subroutine of expand_expr: return rtx if EXP is a
6103 variable or parameter; else return 0. */
6110 switch (TREE_CODE (exp
))
6114 return DECL_RTL (exp
);
6120 #ifdef MAX_INTEGER_COMPUTATION_MODE
6123 check_max_integer_computation_mode (exp
)
6126 enum tree_code code
;
6127 enum machine_mode mode
;
6129 /* Strip any NOPs that don't change the mode. */
6131 code
= TREE_CODE (exp
);
6133 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6134 if (code
== NOP_EXPR
6135 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
6138 /* First check the type of the overall operation. We need only look at
6139 unary, binary and relational operations. */
6140 if (TREE_CODE_CLASS (code
) == '1'
6141 || TREE_CODE_CLASS (code
) == '2'
6142 || TREE_CODE_CLASS (code
) == '<')
6144 mode
= TYPE_MODE (TREE_TYPE (exp
));
6145 if (GET_MODE_CLASS (mode
) == MODE_INT
6146 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6147 internal_error ("unsupported wide integer operation");
6150 /* Check operand of a unary op. */
6151 if (TREE_CODE_CLASS (code
) == '1')
6153 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6154 if (GET_MODE_CLASS (mode
) == MODE_INT
6155 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6156 internal_error ("unsupported wide integer operation");
6159 /* Check operands of a binary/comparison op. */
6160 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
6162 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6163 if (GET_MODE_CLASS (mode
) == MODE_INT
6164 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6165 internal_error ("unsupported wide integer operation");
6167 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
6168 if (GET_MODE_CLASS (mode
) == MODE_INT
6169 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6170 internal_error ("unsupported wide integer operation");
6175 /* Return the highest power of two that EXP is known to be a multiple of.
6176 This is used in updating alignment of MEMs in array references. */
6178 static HOST_WIDE_INT
6179 highest_pow2_factor (exp
)
6182 HOST_WIDE_INT c0
, c1
;
6184 switch (TREE_CODE (exp
))
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
6205 case PLUS_EXPR
: case MINUS_EXPR
: case MIN_EXPR
: case MAX_EXPR
:
6206 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6207 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6208 return MIN (c0
, c1
);
6211 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6212 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6215 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6217 if (integer_pow2p (TREE_OPERAND (exp
, 1))
6218 && host_integerp (TREE_OPERAND (exp
, 1), 1))
6220 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6221 c1
= tree_low_cst (TREE_OPERAND (exp
, 1), 1);
6222 return MAX (1, c0
/ c1
);
6226 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6227 case SAVE_EXPR
: case WITH_RECORD_EXPR
:
6228 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6231 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
6234 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6235 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6236 return MIN (c0
, c1
);
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
  HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
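
/* Illustrative aside (not part of GCC): for an INTEGER_CST the factor that
   highest_pow2_factor reports is just the value of the lowest set bit of the
   constant.  The standalone, hypothetical helper below models that arithmetic
   on a host integer; it is kept out of the build with #if 0.  */
#if 0
#include <stdio.h>

/* Largest power of two dividing C; a large sentinel stands in for
   BIGGEST_ALIGNMENT when C is zero.  */
static unsigned long
pow2_factor_of (unsigned long c)
{
  if (c == 0)
    return 1UL << 12;		/* stand-in for BIGGEST_ALIGNMENT */
  return c & -c;		/* isolate the lowest set bit */
}

int
main (void)
{
  /* 24 = 2**3 * 3 and 12 = 2**2 * 3, so an offset such as i*24 + 12 is
     known to be a multiple of MIN (8, 4) = 4, as in the PLUS_EXPR case.  */
  printf ("%lu %lu\n", pow2_factor_of (24), pow2_factor_of (12));
  return 0;
}
#endif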
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
	       || TREE_CODE (elt) == COND_EXPR)
	      ? TREE_OPERAND (elt, 1)
	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
	      ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    p->expr->x_forced_labels
	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
				   p->expr->x_forced_labels);
	  }
	else
	  {
	    if (modifier == EXPAND_INITIALIZER)
	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						 label_rtx (exp),
						 forced_labels);
	  }

	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6535 if (!DECL_RTL_SET_P (exp
))
6537 error_with_decl (exp
, "prior parameter's size depends on `%s'");
6538 return CONST0_RTX (mode
);
6541 /* ... fall through ... */
6544 /* If a static var's type was incomplete when the decl was written,
6545 but the type is complete now, lay out the decl now. */
6546 if (DECL_SIZE (exp
) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
6547 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6549 rtx value
= DECL_RTL_IF_SET (exp
);
6551 layout_decl (exp
, 0);
6553 /* If the RTL was already set, update its mode and memory
6557 PUT_MODE (value
, DECL_MODE (exp
));
6558 SET_DECL_RTL (exp
, 0);
6559 set_mem_attributes (value
, exp
, 1);
6560 SET_DECL_RTL (exp
, value
);
6564 /* ... fall through ... */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}
6580 /* Show we haven't gotten RTL for this yet. */
6583 /* Handle variables inherited from containing functions. */
6584 context
= decl_function_context (exp
);
6586 /* We treat inline_function_decl as an alias for the current function
6587 because that is the inline function whose vars, types, etc.
6588 are being merged into the current function.
6589 See expand_inline_function. */
6591 if (context
!= 0 && context
!= current_function_decl
6592 && context
!= inline_function_decl
6593 /* If var is static, we don't need a static chain to access it. */
6594 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6595 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6599 /* Mark as non-local and addressable. */
6600 DECL_NONLOCAL (exp
) = 1;
6601 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6603 (*lang_hooks
.mark_addressable
) (exp
);
6604 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6606 addr
= XEXP (DECL_RTL (exp
), 0);
6607 if (GET_CODE (addr
) == MEM
)
6609 = replace_equiv_address (addr
,
6610 fix_lexical_addr (XEXP (addr
, 0), exp
));
6612 addr
= fix_lexical_addr (addr
, exp
);
6614 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6617 /* This is the case of an array whose size is to be determined
6618 from its initializer, while the initializer is still being parsed.
6621 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6622 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6623 temp
= validize_mem (DECL_RTL (exp
));
6625 /* If DECL_RTL is memory, we are in the normal case and either
6626 the address is not valid or it is not a register and -fforce-addr
6627 is specified, get the address into a register. */
6629 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6630 && modifier
!= EXPAND_CONST_ADDRESS
6631 && modifier
!= EXPAND_SUM
6632 && modifier
!= EXPAND_INITIALIZER
6633 && (! memory_address_p (DECL_MODE (exp
),
6634 XEXP (DECL_RTL (exp
), 0))
6636 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6637 temp
= replace_equiv_address (DECL_RTL (exp
),
6638 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6640 /* If we got something, return it. But first, set the alignment
6641 if the address is a register. */
6644 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6645 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6650 /* If the mode of DECL_RTL does not match that of the decl, it
6651 must be a promoted value. We return a SUBREG of the wanted mode,
6652 but mark it so that we know that it was already extended. */
6654 if (GET_CODE (DECL_RTL (exp
)) == REG
6655 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6657 /* Get the signedness used for this variable. Ensure we get the
6658 same mode we got when the variable was declared. */
6659 if (GET_MODE (DECL_RTL (exp
))
6660 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6661 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6664 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6665 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6666 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6670 return DECL_RTL (exp
);
6673 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6674 TREE_INT_CST_HIGH (exp
), mode
);
6676 /* ??? If overflow is set, fold will have done an incomplete job,
6677 which can result in (plus xx (const_int 0)), which can get
6678 simplified by validate_replace_rtx during virtual register
6679 instantiation, which can result in unrecognizable insns.
6680 Avoid this by forcing all overflows into registers. */
6681 if (TREE_CONSTANT_OVERFLOW (exp
)
6682 && modifier
!= EXPAND_INITIALIZER
)
6683 temp
= force_reg (mode
, temp
);
6688 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
6691 /* If optimized, generate immediate CONST_DOUBLE
6692 which will be turned into memory by reload if necessary.
6694 We used to force a register so that loop.c could see it. But
6695 this does not allow gen_* patterns to perform optimizations with
6696 the constants. It also produces two insns in cases like "x = 1.0;".
6697 On most machines, floating-point constants are not permitted in
6698 many insns, so we'd end up copying it to a register in any case.
6700 Now, we do the copying in expand_binop, if appropriate. */
6701 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6702 TYPE_MODE (TREE_TYPE (exp
)));
6706 if (! TREE_CST_RTL (exp
))
6707 output_constant_def (exp
, 1);
6709 /* TREE_CST_RTL probably contains a constant address.
6710 On RISC machines where a constant address isn't valid,
6711 make some insns to get that address into a register. */
6712 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
6713 && modifier
!= EXPAND_CONST_ADDRESS
6714 && modifier
!= EXPAND_INITIALIZER
6715 && modifier
!= EXPAND_SUM
6716 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
6718 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
6719 return replace_equiv_address (TREE_CST_RTL (exp
),
6720 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
6721 return TREE_CST_RTL (exp
);
    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	const char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }
6740 context
= decl_function_context (exp
);
6742 /* If this SAVE_EXPR was at global context, assume we are an
6743 initialization function and move it into our context. */
6745 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6747 /* We treat inline_function_decl as an alias for the current function
6748 because that is the inline function whose vars, types, etc.
6749 are being merged into the current function.
6750 See expand_inline_function. */
6751 if (context
== current_function_decl
|| context
== inline_function_decl
)
6754 /* If this is non-local, handle it. */
6757 /* The following call just exists to abort if the context is
6758 not of a containing function. */
6759 find_function_data (context
);
6761 temp
= SAVE_EXPR_RTL (exp
);
6762 if (temp
&& GET_CODE (temp
) == REG
)
6764 put_var_into_stack (exp
);
6765 temp
= SAVE_EXPR_RTL (exp
);
6767 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6770 replace_equiv_address (temp
,
6771 fix_lexical_addr (XEXP (temp
, 0), exp
));
6773 if (SAVE_EXPR_RTL (exp
) == 0)
6775 if (mode
== VOIDmode
)
6778 temp
= assign_temp (build_qualified_type (type
,
6780 | TYPE_QUAL_CONST
)),
6783 SAVE_EXPR_RTL (exp
) = temp
;
6784 if (!optimize
&& GET_CODE (temp
) == REG
)
6785 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6788 /* If the mode of TEMP does not match that of the expression, it
6789 must be a promoted value. We pass store_expr a SUBREG of the
6790 wanted mode but mark it so that we know that it was already
6791 extended. Note that `unsignedp' was modified above in
6794 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6796 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6797 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6798 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6801 if (temp
== const0_rtx
)
6802 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6804 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
6806 TREE_USED (exp
) = 1;
6809 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6810 must be a promoted value. We return a SUBREG of the wanted mode,
6811 but mark it so that we know that it was already extended. */
6813 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6814 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6816 /* Compute the signedness and make the proper SUBREG. */
6817 promote_mode (type
, mode
, &unsignedp
, 0);
6818 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6819 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6820 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6824 return SAVE_EXPR_RTL (exp
);
6829 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6830 TREE_OPERAND (exp
, 0)
6831 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
6835 case PLACEHOLDER_EXPR
:
6837 tree old_list
= placeholder_list
;
6838 tree placeholder_expr
= 0;
6840 exp
= find_placeholder (exp
, &placeholder_expr
);
6844 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6845 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
6846 placeholder_list
= old_list
;
6850 case WITH_RECORD_EXPR
:
6851 /* Put the object on the placeholder list, expand our first operand,
6852 and pop the list. */
6853 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6855 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
6857 placeholder_list
= TREE_CHAIN (placeholder_list
);
6861 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6862 expand_goto (TREE_OPERAND (exp
, 0));
6864 expand_computed_goto (TREE_OPERAND (exp
, 0));
6868 expand_exit_loop_if_false (NULL
,
6869 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6872 case LABELED_BLOCK_EXPR
:
6873 if (LABELED_BLOCK_BODY (exp
))
6874 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6875 /* Should perhaps use expand_label, but this is simpler and safer. */
6876 do_pending_stack_adjust ();
6877 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6880 case EXIT_BLOCK_EXPR
:
6881 if (EXIT_BLOCK_RETURN (exp
))
6882 sorry ("returned value in block_exit_expr");
6883 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6888 expand_start_loop (1);
6889 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
6897 tree vars
= TREE_OPERAND (exp
, 0);
6898 int vars_need_expansion
= 0;
6900 /* Need to open a binding contour here because
6901 if there are any cleanups they must be contained here. */
6902 expand_start_bindings (2);
6904 /* Mark the corresponding BLOCK for output in its proper place. */
6905 if (TREE_OPERAND (exp
, 2) != 0
6906 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6907 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
6909 /* If VARS have not yet been expanded, expand them now. */
6912 if (!DECL_RTL_SET_P (vars
))
6914 vars_need_expansion
= 1;
6917 expand_decl_init (vars
);
6918 vars
= TREE_CHAIN (vars
);
6921 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
6923 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6929 if (RTL_EXPR_SEQUENCE (exp
))
6931 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6933 emit_insn (RTL_EXPR_SEQUENCE (exp
));
6934 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6936 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6937 free_temps_for_rtl_expr (exp
);
6938 return RTL_EXPR_RTL (exp
);
6941 /* If we don't need the result, just ensure we evaluate any
6947 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6948 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6953 /* All elts simple constants => refer to a constant in memory. But
6954 if this is a non-BLKmode mode, let it store a field at a time
6955 since that should make a CONST_INT or CONST_DOUBLE when we
6956 fold. Likewise, if we have a target we can use, it is best to
6957 store directly into the target unless the type is large enough
6958 that memcpy will be used. If we are making an initializer and
6959 all operands are constant, put it in memory as well.
6961 FIXME: Avoid trying to fill vector constructors piece-meal.
6962 Output them with output_constant_def below unless we're sure
6963 they're zeros. This should go away when vector initializers
6964 are treated like VECTOR_CST instead of arrays.
6966 else if ((TREE_STATIC (exp
)
6967 && ((mode
== BLKmode
6968 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6969 || TREE_ADDRESSABLE (exp
)
6970 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6971 && (! MOVE_BY_PIECES_P
6972 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6974 && ((TREE_CODE (type
) == VECTOR_TYPE
6975 && !is_zeros_p (exp
))
6976 || ! mostly_zeros_p (exp
)))))
6977 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
6979 rtx constructor
= output_constant_def (exp
, 1);
6981 if (modifier
!= EXPAND_CONST_ADDRESS
6982 && modifier
!= EXPAND_INITIALIZER
6983 && modifier
!= EXPAND_SUM
)
6984 constructor
= validize_mem (constructor
);
6990 /* Handle calls that pass values in multiple non-contiguous
6991 locations. The Irix 6 ABI has examples of this. */
6992 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6993 || GET_CODE (target
) == PARALLEL
)
6995 = assign_temp (build_qualified_type (type
,
6997 | (TREE_READONLY (exp
)
6998 * TYPE_QUAL_CONST
))),
6999 0, TREE_ADDRESSABLE (exp
), 1);
7001 store_constructor (exp
, target
, 0, int_expr_size (exp
));
7007 tree exp1
= TREE_OPERAND (exp
, 0);
7009 tree string
= string_constant (exp1
, &index
);
7011 /* Try to optimize reads from const strings. */
7013 && TREE_CODE (string
) == STRING_CST
7014 && TREE_CODE (index
) == INTEGER_CST
7015 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
7016 && GET_MODE_CLASS (mode
) == MODE_INT
7017 && GET_MODE_SIZE (mode
) == 1
7018 && modifier
!= EXPAND_WRITE
)
7019 return gen_int_mode (TREE_STRING_POINTER (string
)
7020 [TREE_INT_CST_LOW (index
)], mode
);
7022 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
7023 op0
= memory_address (mode
, op0
);
7024 temp
= gen_rtx_MEM (mode
, op0
);
7025 set_mem_attributes (temp
, exp
, 0);
7027 /* If we are writing to this object and its type is a record with
7028 readonly fields, we must mark it as readonly so it will
7029 conflict with readonly references to those fields. */
7030 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
7031 RTX_UNCHANGING_P (temp
) = 1;
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
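
	/* Illustrative aside (not part of GCC): the "Oops!" above is ordinary
	   C unsigned wraparound.  The standalone sketch below reproduces it
	   on the host: negating 1 in an 8-bit unsigned type yields 255, so
	   folding the subtraction into the base first changes the result.
	   Kept out of the build with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned char low_bound = 1;
  int array_base = 1000, index = 5;

  /* Intended: array_base + (index - low_bound) = 1004.  */
  int good = array_base + (index - low_bound);

  /* Reassociated in the narrow type:
     (array_base + (unsigned char) -low_bound) + index = 1000 + 255 + 5.  */
  int bad = array_base + (unsigned char) -low_bound + index;

  printf ("%d %d\n", good, bad);	/* prints "1004 1260" */
  return 0;
}
#endif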
	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return gen_int_mode (TREE_STRING_POINTER (array)
			       [TREE_INT_CST_LOW (index)], mode);
7072 /* If this is a constant index into a constant array,
7073 just get the value from the array. Handle both the cases when
7074 we have an explicit constructor and when our operand is a variable
7075 that was declared const. */
7077 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
7078 && TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
7079 && TREE_CODE (index
) == INTEGER_CST
7080 && 0 > compare_tree_int (index
,
7081 list_length (CONSTRUCTOR_ELTS
7082 (TREE_OPERAND (exp
, 0)))))
7086 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
7087 i
= TREE_INT_CST_LOW (index
);
7088 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
7092 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
7096 else if (optimize
>= 1
7097 && modifier
!= EXPAND_CONST_ADDRESS
7098 && modifier
!= EXPAND_INITIALIZER
7099 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
7100 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
7101 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
7103 if (TREE_CODE (index
) == INTEGER_CST
)
7105 tree init
= DECL_INITIAL (array
);
7107 if (TREE_CODE (init
) == CONSTRUCTOR
)
7111 for (elem
= CONSTRUCTOR_ELTS (init
);
7113 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
7114 elem
= TREE_CHAIN (elem
))
7117 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
7118 return expand_expr (fold (TREE_VALUE (elem
)), target
,
7121 else if (TREE_CODE (init
) == STRING_CST
7122 && 0 > compare_tree_int (index
,
7123 TREE_STRING_LENGTH (init
)))
7125 tree type
= TREE_TYPE (TREE_TYPE (init
));
7126 enum machine_mode mode
= TYPE_MODE (type
);
7128 if (GET_MODE_CLASS (mode
) == MODE_INT
7129 && GET_MODE_SIZE (mode
) == 1)
7130 return gen_int_mode (TREE_STRING_POINTER (init
)
7131 [TREE_INT_CST_LOW (index
)], mode
);
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
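
      /* Illustrative aside (not part of GCC): the constructor-field path
	 above truncates a value to BITSIZE bits either by masking (unsigned
	 fields) or by a left shift followed by an arithmetic right shift
	 (signed fields).  The standalone sketch below shows the same
	 arithmetic on a host int; kept out of the build with #if 0.  */
#if 0
#include <stdio.h>

/* Zero-extend the low BITSIZE bits of VAL.  */
static unsigned int
zext_bitfield (unsigned int val, int bitsize)
{
  return val & ((1u << bitsize) - 1);
}

/* Sign-extend the low BITSIZE bits of VAL: shift the field up to the top of
   the word, then arithmetic-shift it back down.  Relies on >> of a negative
   int being arithmetic, which is implementation-defined but universal in
   practice.  */
static int
sext_bitfield (unsigned int val, int bitsize)
{
  int count = (int) (sizeof (int) * 8) - bitsize;
  return (int) (val << count) >> count;
}

int
main (void)
{
  /* A 5-bit field holding the pattern 0b11110: 30 unsigned, -2 signed.  */
  printf ("%u %d\n", zext_bitfield (30, 5), sext_bitfield (30, 5));
  return 0;
}
#endif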
7198 enum machine_mode mode1
;
7199 HOST_WIDE_INT bitsize
, bitpos
;
7202 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7203 &mode1
, &unsignedp
, &volatilep
);
7206 /* If we got back the original object, something is wrong. Perhaps
7207 we are evaluating an expression too early. In any event, don't
7208 infinitely recurse. */
7212 /* If TEM's type is a union of variable size, pass TARGET to the inner
7213 computation, since it will need a temporary and TARGET is known
7214 to have to do. This occurs in unchecked conversion in Ada. */
7218 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7219 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7221 ? target
: NULL_RTX
),
7223 (modifier
== EXPAND_INITIALIZER
7224 || modifier
== EXPAND_CONST_ADDRESS
)
7225 ? modifier
: EXPAND_NORMAL
);
7227 /* If this is a constant, put it into a register if it is a
7228 legitimate constant and OFFSET is 0 and memory if it isn't. */
7229 if (CONSTANT_P (op0
))
7231 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7232 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7234 op0
= force_reg (mode
, op0
);
7236 op0
= validize_mem (force_const_mem (mode
, op0
));
7241 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
7243 /* If this object is in a register, put it into memory.
7244 This case can't occur in C, but can in Ada if we have
7245 unchecked conversion of an expression from a scalar type to
7246 an array or record type. */
7247 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7248 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
7250 /* If the operand is a SAVE_EXPR, we can deal with this by
7251 forcing the SAVE_EXPR into memory. */
7252 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7254 put_var_into_stack (TREE_OPERAND (exp
, 0));
7255 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7260 = build_qualified_type (TREE_TYPE (tem
),
7261 (TYPE_QUALS (TREE_TYPE (tem
))
7262 | TYPE_QUAL_CONST
));
7263 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7265 emit_move_insn (memloc
, op0
);
7270 if (GET_CODE (op0
) != MEM
)
7273 #ifdef POINTERS_EXTEND_UNSIGNED
7274 if (GET_MODE (offset_rtx
) != Pmode
)
7275 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
7277 if (GET_MODE (offset_rtx
) != ptr_mode
)
7278 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7281 /* A constant address in OP0 can have VOIDmode, we must not try
7282 to call force_reg for that case. Avoid that case. */
7283 if (GET_CODE (op0
) == MEM
7284 && GET_MODE (op0
) == BLKmode
7285 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7287 && (bitpos
% bitsize
) == 0
7288 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7289 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7291 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7295 op0
= offset_address (op0
, offset_rtx
,
7296 highest_pow2_factor (offset
));
7299 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7300 record its alignment as BIGGEST_ALIGNMENT. */
7301 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7302 && is_aligning_offset (offset
, tem
))
7303 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7305 /* Don't forget about volatility even if this is a bitfield. */
7306 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7308 if (op0
== orig_op0
)
7309 op0
= copy_rtx (op0
);
7311 MEM_VOLATILE_P (op0
) = 1;
7314 /* The following code doesn't handle CONCAT.
7315 Assume only bitpos == 0 can be used for CONCAT, due to
7316 one element arrays having the same mode as its element. */
7317 if (GET_CODE (op0
) == CONCAT
)
7319 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
7324 /* In cases where an aligned union has an unaligned object
7325 as a field, we might be extracting a BLKmode value from
7326 an integer-mode (e.g., SImode) object. Handle this case
7327 by doing the extract into an object as wide as the field
7328 (which we know to be the width of a basic mode), then
7329 storing into memory, and changing the mode to BLKmode. */
7330 if (mode1
== VOIDmode
7331 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7332 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7333 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7334 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7335 && modifier
!= EXPAND_CONST_ADDRESS
7336 && modifier
!= EXPAND_INITIALIZER
)
7337 /* If the field isn't aligned enough to fetch as a memref,
7338 fetch it as a bit field. */
7339 || (mode1
!= BLKmode
7340 && SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))
7341 && ((TYPE_ALIGN (TREE_TYPE (tem
))
7342 < GET_MODE_ALIGNMENT (mode
))
7343 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
7344 /* If the type and the field are a constant size and the
7345 size of the type isn't the same size as the bitfield,
7346 we must use bitfield operations. */
7348 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7350 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7353 enum machine_mode ext_mode
= mode
;
7355 if (ext_mode
== BLKmode
7356 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7357 && GET_CODE (target
) == MEM
7358 && bitpos
% BITS_PER_UNIT
== 0))
7359 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7361 if (ext_mode
== BLKmode
)
7363 /* In this case, BITPOS must start at a byte boundary and
7364 TARGET, if specified, must be a MEM. */
7365 if (GET_CODE (op0
) != MEM
7366 || (target
!= 0 && GET_CODE (target
) != MEM
)
7367 || bitpos
% BITS_PER_UNIT
!= 0)
7370 op0
= adjust_address (op0
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
7372 target
= assign_temp (type
, 0, 1, 1);
7374 emit_block_move (target
, op0
,
7375 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7382 op0
= validize_mem (op0
);
7384 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7385 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7387 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
7388 unsignedp
, target
, ext_mode
, ext_mode
,
7389 int_size_in_bytes (TREE_TYPE (tem
)));
7391 /* If the result is a record type and BITSIZE is narrower than
7392 the mode of OP0, an integral mode, and this is a big endian
7393 machine, we must put the field into the high-order bits. */
7394 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7395 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7396 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7397 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7398 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7402 if (mode
== BLKmode
)
7404 rtx
new = assign_temp (build_qualified_type
7405 ((*lang_hooks
.types
.type_for_mode
)
7407 TYPE_QUAL_CONST
), 0, 1, 1);
7409 emit_move_insn (new, op0
);
7410 op0
= copy_rtx (new);
7411 PUT_MODE (op0
, BLKmode
);
7412 set_mem_attributes (op0
, exp
, 1);
7418 /* If the result is BLKmode, use that to access the object
7420 if (mode
== BLKmode
)
7423 /* Get a reference to just this component. */
7424 if (modifier
== EXPAND_CONST_ADDRESS
7425 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7426 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7428 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7430 if (op0
== orig_op0
)
7431 op0
= copy_rtx (op0
);
7433 set_mem_attributes (op0
, exp
, 0);
7434 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7435 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7437 MEM_VOLATILE_P (op0
) |= volatilep
;
7438 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7439 || modifier
== EXPAND_CONST_ADDRESS
7440 || modifier
== EXPAND_INITIALIZER
)
7442 else if (target
== 0)
7443 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7445 convert_move (target
, op0
, unsignedp
);
7451 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7453 /* Evaluate the interior expression. */
7454 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7457 /* Get or create an instruction off which to hang a note. */
7458 if (REG_P (subtarget
))
7461 insn
= get_last_insn ();
7464 if (! INSN_P (insn
))
7465 insn
= prev_nonnote_insn (insn
);
7469 target
= gen_reg_rtx (GET_MODE (subtarget
));
7470 insn
= emit_move_insn (target
, subtarget
);
7473 /* Collect the data for the note. */
7474 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7475 vtbl_ref
= plus_constant (vtbl_ref
,
7476 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7477 /* Discard the initial CONST that was added. */
7478 vtbl_ref
= XEXP (vtbl_ref
, 0);
7481 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */

	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
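
	/* Illustrative aside (not part of GCC): the pseudocode above written
	   out as plain C over a byte array, using hypothetical local names
	   that mirror the comment.  Kept out of the build with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* The set {1, 3, 12} over the domain 0..15, packed LSB-first, with
     8-bit "words".  */
  unsigned char set[2] = { 0x0a, 0x10 };
  int set_low = 0, index = 12, bits_per_word = 8;

  int rlo = set_low - (set_low % bits_per_word);
  unsigned char the_word = set[(index - rlo) / bits_per_word];
  int bit_index = index % bits_per_word;
  unsigned char bitmask = 1 << bit_index;

  printf ("%d\n", !!(the_word & bitmask));	/* prints 1: 12 is a member */
  return 0;
}
#endif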
7503 tree set
= TREE_OPERAND (exp
, 0);
7504 tree index
= TREE_OPERAND (exp
, 1);
7505 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7506 tree set_type
= TREE_TYPE (set
);
7507 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7508 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7509 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7510 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7511 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7512 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7513 rtx setaddr
= XEXP (setval
, 0);
7514 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7516 rtx diff
, quo
, rem
, addr
, bit
, result
;
7518 /* If domain is empty, answer is no. Likewise if index is constant
7519 and out of bounds. */
7520 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7521 && TREE_CODE (set_low_bound
) == INTEGER_CST
7522 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7523 || (TREE_CODE (index
) == INTEGER_CST
7524 && TREE_CODE (set_low_bound
) == INTEGER_CST
7525 && tree_int_cst_lt (index
, set_low_bound
))
7526 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7527 && TREE_CODE (index
) == INTEGER_CST
7528 && tree_int_cst_lt (set_high_bound
, index
))))
7532 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7534 /* If we get here, we have to generate the code for both cases
7535 (in range and out of range). */
7537 op0
= gen_label_rtx ();
7538 op1
= gen_label_rtx ();
7540 if (! (GET_CODE (index_val
) == CONST_INT
7541 && GET_CODE (lo_r
) == CONST_INT
))
7542 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7543 GET_MODE (index_val
), iunsignedp
, op1
);
7545 if (! (GET_CODE (index_val
) == CONST_INT
7546 && GET_CODE (hi_r
) == CONST_INT
))
7547 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7548 GET_MODE (index_val
), iunsignedp
, op1
);
7550 /* Calculate the element number of bit zero in the first word
7552 if (GET_CODE (lo_r
) == CONST_INT
)
7553 rlow
= GEN_INT (INTVAL (lo_r
)
7554 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7556 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7557 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7558 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7560 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7561 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7563 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7564 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7565 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7566 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7568 addr
= memory_address (byte_mode
,
7569 expand_binop (index_mode
, add_optab
, diff
,
7570 setaddr
, NULL_RTX
, iunsignedp
,
7573 /* Extract the bit we want to examine. */
7574 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7575 gen_rtx_MEM (byte_mode
, addr
),
7576 make_tree (TREE_TYPE (index
), rem
),
7578 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7579 GET_MODE (target
) == byte_mode
? target
: 0,
7580 1, OPTAB_LIB_WIDEN
);
7582 if (result
!= target
)
7583 convert_move (target
, result
, 1);
7585 /* Output the code to handle the out-of-range case. */
7588 emit_move_insn (target
, const0_rtx
);
    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
				  CLEANUP_EH_ONLY (exp));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
7624 /* Check for a built-in function. */
7625 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7626 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7628 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7630 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7631 == BUILT_IN_FRONTEND
)
7632 return (*lang_hooks
.expand_expr
)
7633 (exp
, original_target
, tmode
, modifier
);
7635 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7638 return expand_call (exp
, target
, ignore
);
7640 case NON_LVALUE_EXPR
:
7643 case REFERENCE_EXPR
:
7644 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7647 if (TREE_CODE (type
) == UNION_TYPE
)
7649 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7651 /* If both input and output are BLKmode, this conversion isn't doing
7652 anything except possibly changing memory attribute. */
7653 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7655 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7658 result
= copy_rtx (result
);
7659 set_mem_attributes (result
, exp
, 0);
7664 target
= assign_temp (type
, 0, 1, 1);
7666 if (GET_CODE (target
) == MEM
)
7667 /* Store data into beginning of memory target. */
7668 store_expr (TREE_OPERAND (exp
, 0),
7669 adjust_address (target
, TYPE_MODE (valtype
), 0), 0);
7671 else if (GET_CODE (target
) == REG
)
7672 /* Store this field into a union of the proper type. */
7673 store_field (target
,
7674 MIN ((int_size_in_bytes (TREE_TYPE
7675 (TREE_OPERAND (exp
, 0)))
7677 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7678 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7679 VOIDmode
, 0, type
, 0);
7683 /* Return the entire union. */
7687 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7689 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7692 /* If the signedness of the conversion differs and OP0 is
7693 a promoted SUBREG, clear that indication since we now
7694 have to do the proper extension. */
7695 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7696 && GET_CODE (op0
) == SUBREG
)
7697 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7702 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7703 if (GET_MODE (op0
) == mode
)
7706 /* If OP0 is a constant, just convert it into the proper mode. */
7707 if (CONSTANT_P (op0
))
7709 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7710 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7712 if (modifier
== EXPAND_INITIALIZER
)
7713 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7714 subreg_lowpart_offset (mode
,
7717 return convert_modes (mode
, inner_mode
, op0
,
7718 TREE_UNSIGNED (inner_type
));
7721 if (modifier
== EXPAND_INITIALIZER
)
7722 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7726 convert_to_mode (mode
, op0
,
7727 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7729 convert_move (target
, op0
,
7730 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are within a word, we
	 can use gen_lowpart.  If neither is true, make sure the operand is
	 in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7754 if (TREE_ADDRESSABLE (exp
))
7757 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7759 = assign_stack_temp_for_type
7760 (TYPE_MODE (inner_type
),
7761 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7763 emit_move_insn (target
, op0
);
7767 /* At this point, OP0 is in the correct mode. If the output type is such
7768 that the operand is known to be aligned, indicate that it is.
7769 Otherwise, we need only be concerned about alignment for non-BLKmode
7771 if (GET_CODE (op0
) == MEM
)
7773 op0
= copy_rtx (op0
);
7775 if (TYPE_ALIGN_OK (type
))
7776 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7777 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7778 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7780 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7781 HOST_WIDE_INT temp_size
7782 = MAX (int_size_in_bytes (inner_type
),
7783 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7784 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7785 temp_size
, 0, type
);
7786 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7788 if (TREE_ADDRESSABLE (exp
))
7791 if (GET_MODE (op0
) == BLKmode
)
7792 emit_block_move (new_with_op0_mode
, op0
,
7793 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7796 emit_move_insn (new_with_op0_mode
, op0
);
7801 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7807 this_optab
= ! unsignedp
&& flag_trapv
7808 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7809 ? addv_optab
: add_optab
;
7811 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7812 something else, make sure we add the register to the constant and
7813 then to the other thing. This case can occur during strength
7814 reduction and doing it this way will produce better code if the
7815 frame pointer or argument pointer is eliminated.
7817 fold-const.c will ensure that the constant is always in the inner
7818 PLUS_EXPR, so the only case we need to do anything about is if
7819 sp, ap, or fp is our second argument, in which case we must swap
7820 the innermost first argument and our second argument. */
7822 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7823 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7824 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7825 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7826 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7827 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7829 tree t
= TREE_OPERAND (exp
, 1);
7831 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7832 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7835 /* If the result is to be ptr_mode and we are adding an integer to
7836 something, we might be forming a constant. So try to use
7837 plus_constant. If it produces a sum and we can't accept it,
7838 use force_operand. This allows P = &ARR[const] to generate
7839 efficient code on machines where a SYMBOL_REF is not a valid
7842 If this is an EXPAND_SUM call, always return the sum. */
7843 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7844 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7846 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7847 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7848 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7852 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7854 /* Use immed_double_const to ensure that the constant is
7855 truncated according to the mode of OP1, then sign extended
7856 to a HOST_WIDE_INT. Using the constant directly can result
7857 in non-canonical RTL in a 64x32 cross compile. */
7859 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7861 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7862 op1
= plus_constant (op1
, INTVAL (constant_part
));
7863 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7864 op1
= force_operand (op1
, target
);
7868 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7869 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7870 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7874 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7875 (modifier
== EXPAND_INITIALIZER
7876 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7877 if (! CONSTANT_P (op0
))
7879 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7880 VOIDmode
, modifier
);
7881 /* Don't go to both_summands if modifier
7882 says it's not right to return a PLUS. */
7883 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7887 /* Use immed_double_const to ensure that the constant is
7888 truncated according to the mode of OP1, then sign extended
7889 to a HOST_WIDE_INT. Using the constant directly can result
7890 in non-canonical RTL in a 64x32 cross compile. */
7892 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7894 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7895 op0
= plus_constant (op0
, INTVAL (constant_part
));
7896 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7897 op0
= force_operand (op0
, target
);
7902 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7905 /* No sense saving up arithmetic to be done
7906 if it's all in the wrong mode to form part of an address.
7907 And force_operand won't know whether to sign-extend or
7909 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7910 || mode
!= ptr_mode
)
7912 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7913 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7914 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7920 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
7921 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
7923 /* We come here from MINUS_EXPR when the second operand is a
7926 /* Make sure any term that's a sum with a constant comes last. */
7927 if (GET_CODE (op0
) == PLUS
7928 && CONSTANT_P (XEXP (op0
, 1)))
7934 /* If adding to a sum including a constant,
7935 associate it to put the constant outside. */
7936 if (GET_CODE (op1
) == PLUS
7937 && CONSTANT_P (XEXP (op1
, 1)))
7939 rtx constant_term
= const0_rtx
;
7941 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7944 /* Ensure that MULT comes first if there is one. */
7945 else if (GET_CODE (op0
) == MULT
)
7946 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7948 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7950 /* Let's also eliminate constants from op0 if possible. */
7951 op0
= eliminate_constant_term (op0
, &constant_term
);
7953 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7954 their sum should be a constant. Form it into OP1, since the
7955 result we want will then be OP0 + OP1. */
7957 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7962 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7965 /* Put a constant term last and put a multiplication first. */
7966 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7967 temp
= op1
, op1
= op0
, op0
= temp
;
7969 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7970 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
7973 /* For initializers, we are allowed to return a MINUS of two
7974 symbolic constants. Here we handle all cases when both operands
7976 /* Handle difference of two symbolic constants,
7977 for the sake of an initializer. */
7978 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7979 && really_constant_p (TREE_OPERAND (exp
, 0))
7980 && really_constant_p (TREE_OPERAND (exp
, 1)))
7982 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
,
7984 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
7987 /* If the last operand is a CONST_INT, use plus_constant of
7988 the negated constant. Else make the MINUS. */
7989 if (GET_CODE (op1
) == CONST_INT
)
7990 return plus_constant (op0
, - INTVAL (op1
));
7992 return gen_rtx_MINUS (mode
, op0
, op1
);
7995 this_optab
= ! unsignedp
&& flag_trapv
7996 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7997 ? subv_optab
: sub_optab
;
7999 /* No sense saving up arithmetic to be done
8000 if it's all in the wrong mode to form part of an address.
8001 And force_operand won't know whether to sign-extend or
8003 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8004 || mode
!= ptr_mode
)
8007 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8010 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
8011 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
8013 /* Convert A - const to A + (-const). */
8014 if (GET_CODE (op1
) == CONST_INT
)
8016 op1
= negate_rtx (mode
, op1
);
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* If we knew for certain that this is arithmetic for an array
	     reference, and we knew the bounds of the array, then we could
	     apply the distributive law across (PLUS X C) for constant C.
	     Without such knowledge, we risk overflowing the computation
	     when both X and C are large, but X+C isn't.  */
	  /* ??? Could perhaps special-case EXP being unsigned and C being
	     positive.  In that case we are certain that X+C is no smaller
	     than X and so the transformed expression will overflow iff the
	     original would have.  */

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
	}
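      /* Illustrative example: for "p + i*4" expanded with EXPAND_SUM in
	 pointer mode, the code above hands back (mult (reg i) (const_int 4))
	 so the PLUS case can combine it into an indexed address such as
	 (plus (reg p) (mult (reg i) (const_int 4))) on targets with
	 scaled-index addressing.  */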
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab
	    = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       ? smul_widen_optab : umul_widen_optab);
	  this_optab
	    = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       ? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
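      /* Illustrative example: the widening-multiply check above lets
	 "(int) (short) a * (int) (short) b" be emitted as one 16x16->32
	 multiply through [us]mul_widen_optab, instead of extending both
	 operands and doing a full 32x32 multiply, on machines that
	 provide such a pattern.  */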
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
	 expensive divide.  If not, combine will rebuild the original
	 computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, unsignedp);
      this_optab = sdiv_optab;
      goto binop;
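      /* Illustrative example: with -funsafe-math-optimizations, "x/y + z/y"
	 becomes "x*(1/y) + z*(1/y)", so CSE can compute 1/y once and two
	 divides collapse into one divide plus two multiplies.  When that
	 does not pay off, combine reconstructs the original divisions.  */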
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */
    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
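      /* Illustrative example: for "(double) 3" the operand expands to a
	 VOIDmode CONST_INT, so it is first copied into a register whose
	 mode comes from the operand's integer type; expand_float then
	 knows which integer width it is converting from.  */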
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS (mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
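      /* For example, for "a && b" the front end may produce either
	 TRUTH_ANDIF_EXPR (short-circuit: B is never evaluated when A is
	 zero) or TRUTH_AND_EXPR (both operands are reduced to 0 or 1 and
	 then bitwise-ANDed).  Plain "a & b" is BIT_AND_EXPR and makes no
	 such 0-or-1 guarantee about its operands.  */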
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);
8442 /* If we would have a "singleton" (see below) were it not for a
8443 conversion in each arm, bring that conversion back out. */
8444 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8445 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8446 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8447 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8449 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8450 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8452 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8453 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8454 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8455 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8456 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8457 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8458 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8459 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8460 return expand_expr (build1 (NOP_EXPR
, type
,
8461 build (COND_EXPR
, TREE_TYPE (iftrue
),
8462 TREE_OPERAND (exp
, 0),
8464 target
, tmode
, modifier
);
8468 /* Note that COND_EXPRs whose type is a structure or union
8469 are required to be constructed to contain assignments of
8470 a temporary variable, so that we can evaluate them here
8471 for side effect only. If type is void, we must do likewise. */
8473 /* If an arm of the branch requires a cleanup,
8474 only that cleanup is performed. */
8477 tree binary_op
= 0, unary_op
= 0;
8479 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8480 convert it to our mode, if necessary. */
8481 if (integer_onep (TREE_OPERAND (exp
, 1))
8482 && integer_zerop (TREE_OPERAND (exp
, 2))
8483 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8487 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8492 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8493 if (GET_MODE (op0
) == mode
)
8497 target
= gen_reg_rtx (mode
);
8498 convert_move (target
, op0
, unsignedp
);
8502 /* Check for X ? A + B : A. If we have this, we can copy A to the
8503 output and conditionally add B. Similarly for unary operations.
8504 Don't do this if X has side-effects because those side effects
8505 might affect A or B and the "?" operation is a sequence point in
8506 ANSI. (operand_equal_p tests for side effects.) */
8508 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8509 && operand_equal_p (TREE_OPERAND (exp
, 2),
8510 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8511 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8512 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8513 && operand_equal_p (TREE_OPERAND (exp
, 1),
8514 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8515 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8516 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8517 && operand_equal_p (TREE_OPERAND (exp
, 2),
8518 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8519 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8520 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8521 && operand_equal_p (TREE_OPERAND (exp
, 1),
8522 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8523 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8525 /* If we are not to produce a result, we have no target. Otherwise,
8526 if a target was specified use it; it will not be used as an
8527 intermediate target unless it is safe. If no target, use a
8532 else if (original_target
8533 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8534 || (singleton
&& GET_CODE (original_target
) == REG
8535 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8536 && original_target
== var_rtx (singleton
)))
8537 && GET_MODE (original_target
) == mode
8538 #ifdef HAVE_conditional_move
8539 && (! can_conditionally_move_p (mode
)
8540 || GET_CODE (original_target
) == REG
8541 || TREE_ADDRESSABLE (type
))
8543 && (GET_CODE (original_target
) != MEM
8544 || TREE_ADDRESSABLE (type
)))
8545 temp
= original_target
;
8546 else if (TREE_ADDRESSABLE (type
))
8549 temp
= assign_temp (type
, 0, 0, 1);
8551 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8552 do the test of X as a store-flag operation, do this as
8553 A + ((X != 0) << log C). Similarly for other simple binary
8554 operators. Only do for C == 1 if BRANCH_COST is low. */
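	 /* Illustrative example: for "x ? a + 4 : a", when X can be computed
	    as a store-flag, the code below produces A + ((X != 0) << 2):
	    the condition is materialized as 0 or 1 and shifted up to the
	    constant, so no branch is needed.  BRANCH_COST decides whether
	    the C == 1 case is worth it.  */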
8555 if (temp
&& singleton
&& binary_op
8556 && (TREE_CODE (binary_op
) == PLUS_EXPR
8557 || TREE_CODE (binary_op
) == MINUS_EXPR
8558 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8559 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8560 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8561 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8562 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8565 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8566 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8567 ? addv_optab
: add_optab
)
8568 : TREE_CODE (binary_op
) == MINUS_EXPR
8569 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8570 ? subv_optab
: sub_optab
)
8571 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8574 /* If we had X ? A : A + 1, do this as A + (X == 0).
8576 We have to invert the truth value here and then put it
8577 back later if do_store_flag fails. We cannot simply copy
8578 TREE_OPERAND (exp, 0) to another variable and modify that
8579 because invert_truthvalue can modify the tree pointed to
8581 if (singleton
== TREE_OPERAND (exp
, 1))
8582 TREE_OPERAND (exp
, 0)
8583 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8585 result
= do_store_flag (TREE_OPERAND (exp
, 0),
8586 (safe_from_p (temp
, singleton
, 1)
8588 mode
, BRANCH_COST
<= 1);
8590 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8591 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8592 build_int_2 (tree_log2
8596 (safe_from_p (temp
, singleton
, 1)
8597 ? temp
: NULL_RTX
), 0);
8601 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8602 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8603 unsignedp
, OPTAB_LIB_WIDEN
);
8605 else if (singleton
== TREE_OPERAND (exp
, 1))
8606 TREE_OPERAND (exp
, 0)
8607 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8610 do_pending_stack_adjust ();
8612 op0
= gen_label_rtx ();
8614 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8618 /* If the target conflicts with the other operand of the
8619 binary op, we can't use it. Also, we can't use the target
8620 if it is a hard register, because evaluating the condition
8621 might clobber it. */
8623 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8624 || (GET_CODE (temp
) == REG
8625 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8626 temp
= gen_reg_rtx (mode
);
8627 store_expr (singleton
, temp
, 0);
8630 expand_expr (singleton
,
8631 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8632 if (singleton
== TREE_OPERAND (exp
, 1))
8633 jumpif (TREE_OPERAND (exp
, 0), op0
);
8635 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8637 start_cleanup_deferral ();
8638 if (binary_op
&& temp
== 0)
8639 /* Just touch the other operand. */
8640 expand_expr (TREE_OPERAND (binary_op
, 1),
8641 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8643 store_expr (build (TREE_CODE (binary_op
), type
,
8644 make_tree (type
, temp
),
8645 TREE_OPERAND (binary_op
, 1)),
8648 store_expr (build1 (TREE_CODE (unary_op
), type
,
8649 make_tree (type
, temp
)),
8653 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8654 comparison operator. If we have one of these cases, set the
8655 output to A, branch on A (cse will merge these two references),
8656 then set the output to FOO. */
8658 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8659 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8660 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8661 TREE_OPERAND (exp
, 1), 0)
8662 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8663 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8664 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8666 if (GET_CODE (temp
) == REG
8667 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8668 temp
= gen_reg_rtx (mode
);
8669 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8670 jumpif (TREE_OPERAND (exp
, 0), op0
);
8672 start_cleanup_deferral ();
8673 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8677 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8678 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8679 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8680 TREE_OPERAND (exp
, 2), 0)
8681 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8682 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8683 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8685 if (GET_CODE (temp
) == REG
8686 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8687 temp
= gen_reg_rtx (mode
);
8688 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8689 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8691 start_cleanup_deferral ();
8692 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8697 op1
= gen_label_rtx ();
8698 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8700 start_cleanup_deferral ();
8702 /* One branch of the cond can be void, if it never returns. For
8703 example A ? throw : E */
8705 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8706 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8708 expand_expr (TREE_OPERAND (exp
, 1),
8709 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8710 end_cleanup_deferral ();
8712 emit_jump_insn (gen_jump (op1
));
8715 start_cleanup_deferral ();
8717 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8718 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8720 expand_expr (TREE_OPERAND (exp
, 2),
8721 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8724 end_cleanup_deferral ();
8735 /* Something needs to be initialized, but we didn't know
8736 where that thing was when building the tree. For example,
8737 it could be the return value of a function, or a parameter
8738 to a function which lays down in the stack, or a temporary
8739 variable which must be passed by reference.
8741 We guarantee that the expression will either be constructed
8742 or copied into our original target. */
8744 tree slot
= TREE_OPERAND (exp
, 0);
8745 tree cleanups
= NULL_TREE
;
8748 if (TREE_CODE (slot
) != VAR_DECL
)
8752 target
= original_target
;
8754 /* Set this here so that if we get a target that refers to a
8755 register variable that's already been used, put_reg_into_stack
8756 knows that it should fix up those uses. */
8757 TREE_USED (slot
) = 1;
8761 if (DECL_RTL_SET_P (slot
))
8763 target
= DECL_RTL (slot
);
8764 /* If we have already expanded the slot, so don't do
8766 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8771 target
= assign_temp (type
, 2, 0, 1);
8772 /* All temp slots at this level must not conflict. */
8773 preserve_temp_slots (target
);
8774 SET_DECL_RTL (slot
, target
);
8775 if (TREE_ADDRESSABLE (slot
))
8776 put_var_into_stack (slot
);
8778 /* Since SLOT is not known to the called function
8779 to belong to its stack frame, we must build an explicit
8780 cleanup. This case occurs when we must build up a reference
8781 to pass the reference as an argument. In this case,
8782 it is very likely that such a reference need not be
8785 if (TREE_OPERAND (exp
, 2) == 0)
8786 TREE_OPERAND (exp
, 2)
8787 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
8788 cleanups
= TREE_OPERAND (exp
, 2);
8793 /* This case does occur, when expanding a parameter which
8794 needs to be constructed on the stack. The target
8795 is the actual stack address that we want to initialize.
8796 The function we call will perform the cleanup in this case. */
8798 /* If we have already assigned it space, use that space,
8799 not target that we were passed in, as our target
8800 parameter is only a hint. */
8801 if (DECL_RTL_SET_P (slot
))
8803 target
= DECL_RTL (slot
);
8804 /* If we have already expanded the slot, so don't do
8806 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8811 SET_DECL_RTL (slot
, target
);
8812 /* If we must have an addressable slot, then make sure that
8813 the RTL that we just stored in slot is OK. */
8814 if (TREE_ADDRESSABLE (slot
))
8815 put_var_into_stack (slot
);
8819 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8820 /* Mark it as expanded. */
8821 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8823 store_expr (exp1
, target
, 0);
8825 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8832 tree lhs
= TREE_OPERAND (exp
, 0);
8833 tree rhs
= TREE_OPERAND (exp
, 1);
8835 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8841 /* If lhs is complex, expand calls in rhs before computing it.
8842 That's so we don't compute a pointer and save it over a
8843 call. If lhs is simple, compute it first so we can give it
8844 as a target if the rhs is just a call. This avoids an
8845 extra temp and copy and that prevents a partial-subsumption
8846 which makes bad code. Actually we could treat
8847 component_ref's of vars like vars. */
8849 tree lhs
= TREE_OPERAND (exp
, 0);
8850 tree rhs
= TREE_OPERAND (exp
, 1);
8854 /* Check for |= or &= of a bitfield of size one into another bitfield
8855 of size 1. In this case, (unless we need the result of the
8856 assignment) we can do this more efficiently with a
8857 test followed by an assignment, if necessary.
8859 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8860 things change so we do, this code should be enhanced to
8863 && TREE_CODE (lhs
) == COMPONENT_REF
8864 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8865 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8866 && TREE_OPERAND (rhs
, 0) == lhs
8867 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8868 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8869 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8871 rtx label
= gen_label_rtx ();
8873 do_jump (TREE_OPERAND (rhs
, 1),
8874 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8875 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8876 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8877 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8879 : integer_zero_node
)),
8881 do_pending_stack_adjust ();
8886 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
      return temp;

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
8908 /* Are we taking the address of a nested function? */
8909 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8910 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8911 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8912 && ! TREE_STATIC (exp
))
8914 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8915 op0
= force_operand (op0
, target
);
8917 /* If we are taking the address of something erroneous, just
8919 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8921 /* If we are taking the address of a constant and are at the
8922 top level, we have to use output_constant_def since we can't
8923 call force_const_mem at top level. */
8925 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8926 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8928 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8931 /* We make sure to pass const0_rtx down if we came in with
8932 ignore set, to avoid doing the cleanups twice for something. */
8933 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8934 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8935 (modifier
== EXPAND_INITIALIZER
8936 ? modifier
: EXPAND_CONST_ADDRESS
));
8938 /* If we are going to ignore the result, OP0 will have been set
8939 to const0_rtx, so just return it. Don't get confused and
8940 think we are taking the address of the constant. */
8944 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8945 clever and returns a REG when given a MEM. */
8946 op0
= protect_from_queue (op0
, 1);
8948 /* We would like the object in memory. If it is a constant, we can
8949 have it be statically allocated into memory. For a non-constant,
8950 we need to allocate some memory and store the value into it. */
8952 if (CONSTANT_P (op0
))
8953 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8955 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8956 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8957 || GET_CODE (op0
) == PARALLEL
)
8959 /* If the operand is a SAVE_EXPR, we can deal with this by
8960 forcing the SAVE_EXPR into memory. */
8961 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
8963 put_var_into_stack (TREE_OPERAND (exp
, 0));
8964 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
8968 /* If this object is in a register, it can't be BLKmode. */
8969 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8970 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8972 if (GET_CODE (op0
) == PARALLEL
)
8973 /* Handle calls that pass values in multiple
8974 non-contiguous locations. The Irix 6 ABI has examples
8976 emit_group_store (memloc
, op0
,
8977 int_size_in_bytes (inner_type
));
8979 emit_move_insn (memloc
, op0
);
8985 if (GET_CODE (op0
) != MEM
)
8988 mark_temp_addr_taken (op0
);
8989 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8991 op0
= XEXP (op0
, 0);
8992 #ifdef POINTERS_EXTEND_UNSIGNED
8993 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8994 && mode
== ptr_mode
)
8995 op0
= convert_memory_address (ptr_mode
, op0
);
9000 /* If OP0 is not aligned as least as much as the type requires, we
9001 need to make a temporary, copy OP0 to it, and take the address of
9002 the temporary. We want to use the alignment of the type, not of
9003 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9004 the test for BLKmode means that can't happen. The test for
9005 BLKmode is because we never make mis-aligned MEMs with
9008 We don't need to do this at all if the machine doesn't have
9009 strict alignment. */
9010 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
9011 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
9013 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
9015 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9017 = assign_stack_temp_for_type
9018 (TYPE_MODE (inner_type
),
9019 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
9020 : int_size_in_bytes (inner_type
),
9021 1, build_qualified_type (inner_type
,
9022 (TYPE_QUALS (inner_type
)
9023 | TYPE_QUAL_CONST
)));
9025 if (TYPE_ALIGN_OK (inner_type
))
9028 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
9033 op0
= force_operand (XEXP (op0
, 0), target
);
9037 && GET_CODE (op0
) != REG
9038 && modifier
!= EXPAND_CONST_ADDRESS
9039 && modifier
!= EXPAND_INITIALIZER
9040 && modifier
!= EXPAND_SUM
)
9041 op0
= force_reg (Pmode
, op0
);
9043 if (GET_CODE (op0
) == REG
9044 && ! REG_USERVAR_P (op0
))
9045 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
9047 #ifdef POINTERS_EXTEND_UNSIGNED
9048 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
9049 && mode
== ptr_mode
)
9050 op0
= convert_memory_address (ptr_mode
, op0
);
9055 case ENTRY_VALUE_EXPR
:
9058 /* COMPLEX type for Extended Pascal & Fortran */
9061 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9064 /* Get the rtx code of the operands. */
9065 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9066 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
9069 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
9073 /* Move the real (op0) and imaginary (op1) parts to their location. */
9074 emit_move_insn (gen_realpart (mode
, target
), op0
);
9075 emit_move_insn (gen_imagpart (mode
, target
), op1
);
9077 insns
= get_insns ();
9080 /* Complex construction should appear as a single unit. */
9081 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9082 each with a separate pseudo as destination.
9083 It's not correct for flow to treat them as a unit. */
9084 if (GET_CODE (target
) != CONCAT
)
9085 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
9093 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9094 return gen_realpart (mode
, op0
);
9097 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9098 return gen_imagpart (mode
, op0
);
9102 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9106 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9109 target
= gen_reg_rtx (mode
);
9113 /* Store the realpart and the negated imagpart to target. */
9114 emit_move_insn (gen_realpart (partmode
, target
),
9115 gen_realpart (partmode
, op0
));
9117 imag_t
= gen_imagpart (partmode
, target
);
9118 temp
= expand_unop (partmode
,
9119 ! unsignedp
&& flag_trapv
9120 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
9121 ? negv_optab
: neg_optab
,
9122 gen_imagpart (partmode
, op0
), imag_t
, 0);
9124 emit_move_insn (imag_t
, temp
);
9126 insns
= get_insns ();
9129 /* Conjugate should appear as a single unit
9130 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9131 each with a separate pseudo as destination.
9132 It's not correct for flow to treat them as a unit. */
9133 if (GET_CODE (target
) != CONCAT
)
9134 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
9141 case TRY_CATCH_EXPR
:
9143 tree handler
= TREE_OPERAND (exp
, 1);
9145 expand_eh_region_start ();
9147 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9149 expand_eh_region_end_cleanup (handler
);
9154 case TRY_FINALLY_EXPR
:
9156 tree try_block
= TREE_OPERAND (exp
, 0);
9157 tree finally_block
= TREE_OPERAND (exp
, 1);
9159 if (!optimize
|| unsafe_for_reeval (finally_block
) > 1)
9161 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9162 is not sufficient, so we cannot expand the block twice.
9163 So we play games with GOTO_SUBROUTINE_EXPR to let us
9164 expand the thing only once. */
9165 /* When not optimizing, we go ahead with this form since
9166 (1) user breakpoints operate more predictably without
9167 code duplication, and
9168 (2) we're not running any of the global optimizers
9169 that would explode in time/space with the highly
9170 connected CFG created by the indirect branching. */
9172 rtx finally_label
= gen_label_rtx ();
9173 rtx done_label
= gen_label_rtx ();
9174 rtx return_link
= gen_reg_rtx (Pmode
);
9175 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
9176 (tree
) finally_label
, (tree
) return_link
);
9177 TREE_SIDE_EFFECTS (cleanup
) = 1;
9179 /* Start a new binding layer that will keep track of all cleanup
9180 actions to be performed. */
9181 expand_start_bindings (2);
9182 target_temp_slot_level
= temp_slot_level
;
9184 expand_decl_cleanup (NULL_TREE
, cleanup
);
9185 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9187 preserve_temp_slots (op0
);
9188 expand_end_bindings (NULL_TREE
, 0, 0);
9189 emit_jump (done_label
);
9190 emit_label (finally_label
);
9191 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
9192 emit_indirect_jump (return_link
);
9193 emit_label (done_label
);
9197 expand_start_bindings (2);
9198 target_temp_slot_level
= temp_slot_level
;
9200 expand_decl_cleanup (NULL_TREE
, finally_block
);
9201 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9203 preserve_temp_slots (op0
);
9204 expand_end_bindings (NULL_TREE
, 0, 0);
9210 case GOTO_SUBROUTINE_EXPR
:
9212 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9213 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9214 rtx return_address
= gen_label_rtx ();
9215 emit_move_insn (return_link
,
9216 gen_rtx_LABEL_REF (Pmode
, return_address
));
9218 emit_label (return_address
);
9223 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9226 return get_exception_pointer (cfun
);
9229 /* Function descriptors are not valid except for as
9230 initialization constants, and should not be expanded. */
9234 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
9237 /* Here to do an ordinary binary operator, generating an instruction
9238 from the optab already placed in `this_optab'. */
9240 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
9242 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
9243 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9245 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9246 unsignedp
, OPTAB_LIB_WIDEN
);
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (offset, exp)
     tree offset;
     tree exp;
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR
	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
	  && (TREE_OPERAND (offset, 0) == exp
	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
		      == TREE_TYPE (exp)))));
}
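/* Illustrative note: the shape recognized above is the alignment idiom
   "(-(sizetype) &EXP) & C", where C is one less than a power of two and
   larger than BIGGEST_ALIGNMENT.  Adding that offset to &EXP rounds the
   address up to the next (C + 1)-byte boundary, which is why the result
   is known to be aligned more strictly than BIGGEST_ALIGNMENT.  */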
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
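/* Illustrative example: if ARG is the tree for ("hello world" + 3), this
   returns the STRING_CST node and sets *PTR_OFFSET to the sizetype
   constant 3; builtin expanders (e.g. for strlen) use this to fold the
   call at compile time.  */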
9345 /* Expand code for a post- or pre- increment or decrement
9346 and return the RTX for the result.
9347 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9350 expand_increment (exp
, post
, ignore
)
9356 tree incremented
= TREE_OPERAND (exp
, 0);
9357 optab this_optab
= add_optab
;
9359 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9360 int op0_is_copy
= 0;
9361 int single_insn
= 0;
9362 /* 1 means we can't store into OP0 directly,
9363 because it is a subreg narrower than a word,
9364 and we don't dare clobber the rest of the word. */
9367 /* Stabilize any component ref that might need to be
9368 evaluated more than once below. */
9370 || TREE_CODE (incremented
) == BIT_FIELD_REF
9371 || (TREE_CODE (incremented
) == COMPONENT_REF
9372 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9373 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9374 incremented
= stabilize_reference (incremented
);
9375 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9376 ones into save exprs so that they don't accidentally get evaluated
9377 more than once by the code below. */
9378 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9379 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9380 incremented
= save_expr (incremented
);
9382 /* Compute the operands as RTX.
9383 Note whether OP0 is the actual lvalue or a copy of it:
9384 I believe it is a copy iff it is a register or subreg
9385 and insns were generated in computing it. */
9387 temp
= get_last_insn ();
9388 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9390 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9391 in place but instead must do sign- or zero-extension during assignment,
9392 so we copy it into a new register and let the code below use it as
9395 Note that we can safely modify this SUBREG since it is know not to be
9396 shared (it was made by the expand_expr call above). */
9398 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9401 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9405 else if (GET_CODE (op0
) == SUBREG
9406 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9408 /* We cannot increment this SUBREG in place. If we are
9409 post-incrementing, get a copy of the old value. Otherwise,
9410 just mark that we cannot increment in place. */
9412 op0
= copy_to_reg (op0
);
9417 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9418 && temp
!= get_last_insn ());
9419 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9421 /* Decide whether incrementing or decrementing. */
9422 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9423 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9424 this_optab
= sub_optab
;
9426 /* Convert decrement by a constant into a negative increment. */
9427 if (this_optab
== sub_optab
9428 && GET_CODE (op1
) == CONST_INT
)
9430 op1
= GEN_INT (-INTVAL (op1
));
9431 this_optab
= add_optab
;
9434 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9435 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9437 /* For a preincrement, see if we can do this with a single instruction. */
9440 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9441 if (icode
!= (int) CODE_FOR_nothing
9442 /* Make sure that OP0 is valid for operands 0 and 1
9443 of the insn we want to queue. */
9444 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9445 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9446 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9450 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9451 then we cannot just increment OP0. We must therefore contrive to
9452 increment the original value. Then, for postincrement, we can return
9453 OP0 since it is a copy of the old value. For preincrement, expand here
9454 unless we can do it with a single insn.
9456 Likewise if storing directly into OP0 would clobber high bits
9457 we need to preserve (bad_subreg). */
9458 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9460 /* This is the easiest way to increment the value wherever it is.
9461 Problems with multiple evaluation of INCREMENTED are prevented
9462 because either (1) it is a component_ref or preincrement,
9463 in which case it was stabilized above, or (2) it is an array_ref
9464 with constant index in an array in a register, which is
9465 safe to reevaluate. */
9466 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9467 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9468 ? MINUS_EXPR
: PLUS_EXPR
),
9471 TREE_OPERAND (exp
, 1));
9473 while (TREE_CODE (incremented
) == NOP_EXPR
9474 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9476 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9477 incremented
= TREE_OPERAND (incremented
, 0);
9480 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9481 return post
? op0
: temp
;
9486 /* We have a true reference to the value in OP0.
9487 If there is an insn to add or subtract in this mode, queue it.
9488 Queueing the increment insn avoids the register shuffling
9489 that often results if we must increment now and first save
9490 the old value for subsequent use. */
9492 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9493 op0
= stabilize (op0
);
9496 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9497 if (icode
!= (int) CODE_FOR_nothing
9498 /* Make sure that OP0 is valid for operands 0 and 1
9499 of the insn we want to queue. */
9500 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9501 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9503 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9504 op1
= force_reg (mode
, op1
);
9506 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9508 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9510 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9511 ? force_reg (Pmode
, XEXP (op0
, 0))
9512 : copy_to_reg (XEXP (op0
, 0)));
9515 op0
= replace_equiv_address (op0
, addr
);
9516 temp
= force_reg (GET_MODE (op0
), op0
);
9517 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9518 op1
= force_reg (mode
, op1
);
9520 /* The increment queue is LIFO, thus we have to `queue'
9521 the instructions in reverse order. */
9522 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9523 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9528 /* Preincrement, or we can't increment with one simple insn. */
9530 /* Save a copy of the value before inc or dec, to return it later. */
9531 temp
= value
= copy_to_reg (op0
);
9533 /* Arrange to return the incremented value. */
9534 /* Copy the rtx because expand_binop will protect from the queue,
9535 and the results of that would be invalid for us to return
9536 if our caller does emit_queue before using our result. */
9537 temp
= copy_rtx (value
= op0
);
9539 /* Increment however we can. */
9540 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9541 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9543 /* Make sure the value is stored into OP0. */
9545 emit_move_insn (op0
, op1
);
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
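/* Illustrative example: for "if (a && b) f ();" this is called on the
   TRUTH_ANDIF_EXPR with IF_FALSE_LABEL set to the label past the call.
   It jumps to that label when A is zero and only on the fall-through
   path evaluates and tests B, so no 0-or-1 value for "a && b" is ever
   materialized.  */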
9631 do_jump (exp
, if_false_label
, if_true_label
)
9633 rtx if_false_label
, if_true_label
;
9635 enum tree_code code
= TREE_CODE (exp
);
9636 /* Some cases need to create a label to jump to
9637 in order to properly fall through.
9638 These cases set DROP_THROUGH_LABEL nonzero. */
9639 rtx drop_through_label
= 0;
9643 enum machine_mode mode
;
9645 #ifdef MAX_INTEGER_COMPUTATION_MODE
9646 check_max_integer_computation_mode (exp
);
9657 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9663 /* This is not true with #pragma weak */
9665 /* The address of something can never be zero. */
9667 emit_jump (if_true_label
);
9672 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9673 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9674 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
9675 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
9678 /* If we are narrowing the operand, we have to do the compare in the
9680 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9681 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9683 case NON_LVALUE_EXPR
:
9684 case REFERENCE_EXPR
:
9689 /* These cannot change zero->nonzero or vice versa. */
9690 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9693 case WITH_RECORD_EXPR
:
9694 /* Put the object on the placeholder list, recurse through our first
9695 operand, and pop the list. */
9696 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9698 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9699 placeholder_list
= TREE_CHAIN (placeholder_list
);
9703 /* This is never less insns than evaluating the PLUS_EXPR followed by
9704 a test and can be longer if the test is eliminated. */
9706 /* Reduce to minus. */
9707 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9708 TREE_OPERAND (exp
, 0),
9709 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9710 TREE_OPERAND (exp
, 1))));
9711 /* Process as MINUS. */
9715 /* Nonzero iff operands of minus differ. */
9716 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9717 TREE_OPERAND (exp
, 0),
9718 TREE_OPERAND (exp
, 1)),
9719 NE
, NE
, if_false_label
, if_true_label
);
9723 /* If we are AND'ing with a small constant, do this comparison in the
9724 smallest type that fits. If the machine doesn't have comparisons
9725 that small, it will be converted back to the wider comparison.
9726 This helps if we are testing the sign bit of a narrower object.
9727 combine can't do this for us because it can't know whether a
9728 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9730 if (! SLOW_BYTE_ACCESS
9731 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9732 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9733 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9734 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9735 && (type
= (*lang_hooks
.types
.type_for_mode
) (mode
, 1)) != 0
9736 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9737 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9738 != CODE_FOR_nothing
))
9740 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9745 case TRUTH_NOT_EXPR
:
9746 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9749 case TRUTH_ANDIF_EXPR
:
9750 if (if_false_label
== 0)
9751 if_false_label
= drop_through_label
= gen_label_rtx ();
9752 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9753 start_cleanup_deferral ();
9754 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9755 end_cleanup_deferral ();
9758 case TRUTH_ORIF_EXPR
:
9759 if (if_true_label
== 0)
9760 if_true_label
= drop_through_label
= gen_label_rtx ();
9761 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9762 start_cleanup_deferral ();
9763 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9764 end_cleanup_deferral ();
9769 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9770 preserve_temp_slots (NULL_RTX
);
9774 do_pending_stack_adjust ();
9775 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9781 case ARRAY_RANGE_REF
:
9783 HOST_WIDE_INT bitsize
, bitpos
;
9785 enum machine_mode mode
;
9790 /* Get description of this reference. We don't actually care
9791 about the underlying object here. */
9792 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9793 &unsignedp
, &volatilep
);
9795 type
= (*lang_hooks
.types
.type_for_size
) (bitsize
, unsignedp
);
9796 if (! SLOW_BYTE_ACCESS
9797 && type
!= 0 && bitsize
>= 0
9798 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9799 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9800 != CODE_FOR_nothing
))
9802 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9809 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9810 if (integer_onep (TREE_OPERAND (exp
, 1))
9811 && integer_zerop (TREE_OPERAND (exp
, 2)))
9812 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9814 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9815 && integer_onep (TREE_OPERAND (exp
, 2)))
9816 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9820 rtx label1
= gen_label_rtx ();
9821 drop_through_label
= gen_label_rtx ();
9823 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9825 start_cleanup_deferral ();
9826 /* Now the THEN-expression. */
9827 do_jump (TREE_OPERAND (exp
, 1),
9828 if_false_label
? if_false_label
: drop_through_label
,
9829 if_true_label
? if_true_label
: drop_through_label
);
9830 /* In case the do_jump just above never jumps. */
9831 do_pending_stack_adjust ();
9832 emit_label (label1
);
9834 /* Now the ELSE-expression. */
9835 do_jump (TREE_OPERAND (exp
, 2),
9836 if_false_label
? if_false_label
: drop_through_label
,
9837 if_true_label
? if_true_label
: drop_through_label
);
9838 end_cleanup_deferral ();
9844 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9846 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9847 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9849 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9850 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9853 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9854 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9855 fold (build1 (REALPART_EXPR
,
9856 TREE_TYPE (inner_type
),
9858 fold (build1 (REALPART_EXPR
,
9859 TREE_TYPE (inner_type
),
9861 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9862 fold (build1 (IMAGPART_EXPR
,
9863 TREE_TYPE (inner_type
),
9865 fold (build1 (IMAGPART_EXPR
,
9866 TREE_TYPE (inner_type
),
9868 if_false_label
, if_true_label
);
9871 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9872 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9874 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9875 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9876 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9878 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9884 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9886 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9887 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9889 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9890 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9893 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9894 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9895 fold (build1 (REALPART_EXPR
,
9896 TREE_TYPE (inner_type
),
9898 fold (build1 (REALPART_EXPR
,
9899 TREE_TYPE (inner_type
),
9901 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9902 fold (build1 (IMAGPART_EXPR
,
9903 TREE_TYPE (inner_type
),
9905 fold (build1 (IMAGPART_EXPR
,
9906 TREE_TYPE (inner_type
),
9908 if_false_label
, if_true_label
);
9911 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9912 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9914 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9915 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9916 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9918 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

      {
        enum rtx_code rcode1;
        enum tree_code tcode2;

        case UNLT_EXPR:
          rcode1 = UNLT;
          tcode2 = LT_EXPR;
          goto unordered_bcc;
        case UNLE_EXPR:
          rcode1 = UNLE;
          tcode2 = LE_EXPR;
          goto unordered_bcc;
        case UNGT_EXPR:
          rcode1 = UNGT;
          tcode2 = GT_EXPR;
          goto unordered_bcc;
        case UNGE_EXPR:
          rcode1 = UNGE;
          tcode2 = GE_EXPR;
          goto unordered_bcc;
        case UNEQ_EXPR:
          rcode1 = UNEQ;
          tcode2 = EQ_EXPR;
          goto unordered_bcc;

        unordered_bcc:
          mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
          if (can_compare_p (rcode1, mode, ccp_jump))
            do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                                 if_true_label);
          else
            {
              tree op0 = save_expr (TREE_OPERAND (exp, 0));
              tree op1 = save_expr (TREE_OPERAND (exp, 1));
              tree cmp0, cmp1;

              /* If the target doesn't support combined unordered
                 compares, decompose into UNORDERED + comparison.  */
              cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
              cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
              exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
              do_jump (exp, if_false_label, if_true_label);
            }
      }
      break;

    case CALL_EXPR:
      /* __builtin_expect (<test>, 0) and
         __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */

      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
        {
          tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          tree arglist = TREE_OPERAND (exp, 1);

          if (TREE_CODE (fndecl) == FUNCTION_DECL
              && DECL_BUILT_IN (fndecl)
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
              && arglist != NULL_TREE
              && TREE_CHAIN (arglist) != NULL_TREE)
            {
              rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                    if_true_label);

              if (seq != NULL_RTX)
                {
                  emit_insn (seq);
                  return;
                }
            }
        }
      /* fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                 GET_MODE (temp), NULL_RTX,
                                 if_false_label, if_true_label);
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
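
/* Editorial illustration (not part of GCC): a source-level sketch of the
   contract do_jump implements.  Passing a null label means "fall through".
   The COND_EXPR special cases above rely on the fact that jumping on
   (a ? 1 : 0) is the same as jumping on A itself, and (a ? 0 : 1) is the
   same jump with the two labels exchanged.  The function below is a plain C
   analogue of that reasoning; all names are invented for the example.  */
#if 0
static void
example_jump_on_cond (int a, void (*if_false) (void), void (*if_true) (void))
{
  /* do_jump on (a ? 1 : 0) reduces to testing A directly ...  */
  if (a)
    {
      if (if_true)
        if_true ();     /* jump to if_true_label */
    }
  else
    {
      if (if_false)
        if_false ();    /* jump to if_false_label */
    }
  /* ... and (a ? 0 : 1) would simply swap the two calls.  */
}
#endif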

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}
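
/* Editorial illustration (not part of GCC): why one "greater than" helper
   suffices.  With SWAP nonzero the operands are expanded in the opposite
   order, so OP0 < OP1 is tested as OP1 > OP0.  Plain C analogue of the
   identity; names are invented for the example.  */
#if 0
static int
example_less_via_greater (long a, long b)
{
  /* do_jump_by_parts_greater (exp, 1, ...) tests b > a,
     which is exactly a < b.  */
  return b > a;
}
#endif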

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
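
/* Editorial illustration (not part of GCC): the word-at-a-time comparison
   emitted above, written as plain C for a two-word value.  High-order words
   are compared first with the requested signedness; lower words only matter
   when the higher ones are equal, and they are always compared unsigned.
   Types and names are invented for the example.  */
#if 0
static int
example_two_word_greater (long hi0, unsigned long lo0,
                          long hi1, unsigned long lo1)
{
  if (hi0 > hi1)                /* signed compare of the high-order word */
    return 1;                   /* jump to if_true_label */
  if (hi0 != hi1)
    return 0;                   /* jump to if_false_label */
  if (lo0 > lo1)                /* low-order word: unsigned compare */
    return 1;
  return 0;                     /* both words exhausted: not greater */
}
#endif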

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
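
/* Editorial illustration (not part of GCC): the "or all the words" zero
   test used above, as plain C for a two-word value.  One IOR plus a single
   comparison replaces a chain of per-word compares.  Names invented.  */
#if 0
static int
example_two_word_is_zero (unsigned long w0, unsigned long w1)
{
  unsigned long part = w0 | w1; /* corresponds to the ior_optab loop */
  return part == 0;             /* single comparison against zero */
}
#endif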

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
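
/* Editorial illustration (not part of GCC): the operand canonicalization
   performed above.  When the first operand is the constant one, the
   operands are exchanged and the condition is reversed accordingly, so the
   same comparison is still computed.  Plain C sketch, names invented.  */
#if 0
static int
example_swap_commutes_condition (int x)
{
  /* "5 < x" is rewritten as "x > 5": swap_condition turns LT into GT.  */
  return x > 5;
}
#endif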

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
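
/* Editorial illustration (not part of GCC): reversing the condition when
   only the false label is present.  "Jump to L when (a < b) is false" is
   emitted as "jump to L when (a >= b)" for integers; the rewrite is skipped
   for floating-point modes because, with NaNs, !(a < b) is not the same as
   (a >= b).  Plain C sketch, names invented.  */
#if 0
static int
example_reverse_condition_int (int a, int b)
{
  /* reverse_condition (LT) == GE is exact for integers ...  */
  return !(a < b) == (a >= b);          /* always 1 */
}

static int
example_reverse_condition_float (double a, double b)
{
  /* ... but not for floats: if either operand is a NaN, !(a < b) is 1
     while (a >= b) is 0, so the two jumps would differ.  */
  return !(a < b) == (a >= b);          /* 0 when a or b is a NaN */
}
#endif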

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
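
/* Editorial illustration (not part of GCC): the single-bit special case in
   do_store_flag above.  An equality test of one bit is done by shifting
   that bit down to bit 0 and masking with 1; for EQ the result is then
   XORed with 1, so no store-flag (scc) instruction is needed.  Plain C
   sketch, names invented.  */
#if 0
static int
example_single_bit_ne (unsigned int x)
{
  /* (x & 8) != 0  becomes  (x >> 3) & 1.  */
  return (x >> 3) & 1;
}

static int
example_single_bit_eq (unsigned int x)
{
  /* (x & 8) == 0  becomes  ((x >> 3) & 1) ^ 1.  */
  return ((x >> 3) & 1) ^ 1;
}
#endif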

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
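
/* Editorial illustration (not part of GCC): how try_casesi narrows an index
   that is wider than SImode.  The lower bound is subtracted in the original
   mode, the unsigned range check is done in that mode, and only then is the
   index truncated, so no significant bits are lost.  Plain C sketch with
   invented names; "long long" stands in for the wide mode.  */
#if 0
static int
example_narrow_switch_index (long long idx, long long minval,
                             unsigned long long range)
{
  unsigned long long rel = (unsigned long long) (idx - minval);
  if (rel > range)
    return -1;                  /* out of range: go to default_label */
  return (int) rel;             /* now safe to truncate to SImode */
}
#endif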

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
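
/* Editorial illustration (not part of GCC): what "emulate with narrower
   modes" means above.  If the target can move the inner mode (say DImode),
   a V2DI value can be handled as two DImode pieces even without native
   V2DI support.  Plain C sketch, names invented.  */
#if 0
static void
example_emulated_vector_copy (long long dst[2], const long long src[2])
{
  dst[0] = src[0];              /* first inner-mode (DImode) move */
  dst[1] = src[1];              /* second inner-mode move */
}
#endif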

#include "gt-expr.h"