1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
71 #define STACK_PUSH_CODE PRE_INC
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
85 #define TARGET_MEM_FUNCTIONS 0
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 static tree placeholder_list
= 0;
100 /* This structure is used by move_by_pieces to describe the move to
102 struct move_by_pieces
111 int explicit_inc_from
;
112 unsigned HOST_WIDE_INT len
;
113 HOST_WIDE_INT offset
;
117 /* This structure is used by store_by_pieces to describe the clear to
120 struct store_by_pieces
126 unsigned HOST_WIDE_INT len
;
127 HOST_WIDE_INT offset
;
128 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
133 static rtx enqueue_insn
PARAMS ((rtx
, rtx
));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT
,
137 static void move_by_pieces_1
PARAMS ((rtx (*) (rtx
, ...), enum machine_mode
,
138 struct move_by_pieces
*));
139 static bool block_move_libcall_safe_for_call_parm
PARAMS ((void));
140 static bool emit_block_move_via_movstr
PARAMS ((rtx
, rtx
, rtx
, unsigned));
141 static rtx emit_block_move_via_libcall
PARAMS ((rtx
, rtx
, rtx
));
142 static tree emit_block_move_libcall_fn
PARAMS ((int));
143 static void emit_block_move_via_loop
PARAMS ((rtx
, rtx
, rtx
, unsigned));
144 static rtx clear_by_pieces_1
PARAMS ((PTR
, HOST_WIDE_INT
,
146 static void clear_by_pieces
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
148 static void store_by_pieces_1
PARAMS ((struct store_by_pieces
*,
150 static void store_by_pieces_2
PARAMS ((rtx (*) (rtx
, ...),
152 struct store_by_pieces
*));
153 static bool clear_storage_via_clrstr
PARAMS ((rtx
, rtx
, unsigned));
154 static rtx clear_storage_via_libcall
PARAMS ((rtx
, rtx
));
155 static tree clear_storage_libcall_fn
PARAMS ((int));
156 static rtx compress_float_constant
PARAMS ((rtx
, rtx
));
157 static rtx get_subtarget
PARAMS ((rtx
));
158 static int is_zeros_p
PARAMS ((tree
));
159 static int mostly_zeros_p
PARAMS ((tree
));
160 static void store_constructor_field
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
161 HOST_WIDE_INT
, enum machine_mode
,
162 tree
, tree
, int, int));
163 static void store_constructor
PARAMS ((tree
, rtx
, int, HOST_WIDE_INT
));
164 static rtx store_field
PARAMS ((rtx
, HOST_WIDE_INT
,
165 HOST_WIDE_INT
, enum machine_mode
,
166 tree
, enum machine_mode
, int, tree
,
168 static rtx var_rtx
PARAMS ((tree
));
169 static HOST_WIDE_INT highest_pow2_factor
PARAMS ((tree
));
170 static HOST_WIDE_INT highest_pow2_factor_for_type
PARAMS ((tree
, tree
));
171 static int is_aligning_offset
PARAMS ((tree
, tree
));
172 static rtx expand_increment
PARAMS ((tree
, int, int));
173 static void do_jump_by_parts_greater
PARAMS ((tree
, int, rtx
, rtx
));
174 static void do_jump_by_parts_equality
PARAMS ((tree
, rtx
, rtx
));
175 static void do_compare_and_jump
PARAMS ((tree
, enum rtx_code
, enum rtx_code
,
177 static rtx do_store_flag
PARAMS ((tree
, rtx
, enum machine_mode
, int));
179 static void emit_single_push_insn
PARAMS ((enum machine_mode
, rtx
, tree
));
181 static void do_tablejump
PARAMS ((rtx
, enum machine_mode
, rtx
, rtx
, rtx
));
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
187 static char direct_load
[NUM_MACHINE_MODES
];
188 static char direct_store
[NUM_MACHINE_MODES
];
190 /* Record for each mode whether we can float-extend from memory. */
192 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
258 enum machine_mode mode
;
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
267 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg
= gen_rtx_REG (VOIDmode
, -1);
273 insn
= rtx_alloc (INSN
);
274 pat
= gen_rtx_SET (0, NULL_RTX
, NULL_RTX
);
275 PATTERN (insn
) = pat
;
277 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
278 mode
= (enum machine_mode
) ((int) mode
+ 1))
282 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
283 PUT_MODE (mem
, mode
);
284 PUT_MODE (mem1
, mode
);
285 PUT_MODE (reg
, mode
);
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
290 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
291 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
292 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
295 if (! HARD_REGNO_MODE_OK (regno
, mode
))
301 SET_DEST (pat
) = reg
;
302 if (recog (pat
, insn
, &num_clobbers
) >= 0)
303 direct_load
[(int) mode
] = 1;
305 SET_SRC (pat
) = mem1
;
306 SET_DEST (pat
) = reg
;
307 if (recog (pat
, insn
, &num_clobbers
) >= 0)
308 direct_load
[(int) mode
] = 1;
311 SET_DEST (pat
) = mem
;
312 if (recog (pat
, insn
, &num_clobbers
) >= 0)
313 direct_store
[(int) mode
] = 1;
316 SET_DEST (pat
) = mem1
;
317 if (recog (pat
, insn
, &num_clobbers
) >= 0)
318 direct_store
[(int) mode
] = 1;
322 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
324 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
325 mode
= GET_MODE_WIDER_MODE (mode
))
327 enum machine_mode srcmode
;
328 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
329 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
333 ic
= can_extend_p (mode
, srcmode
, 0);
334 if (ic
== CODE_FOR_nothing
)
337 PUT_MODE (mem
, srcmode
);
339 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
340 float_extend_from_mem
[mode
][srcmode
] = true;
345 /* This is run at the start of compiling a function. */
350 cfun
->expr
= (struct expr_status
*) ggc_alloc (sizeof (struct expr_status
));
353 pending_stack_adjust
= 0;
354 stack_pointer_delta
= 0;
355 inhibit_defer_pop
= 0;
357 apply_args_value
= 0;
361 /* Small sanity check that the queue is empty at the end of a function. */
364 finish_expr_for_function ()
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
381 enqueue_insn (var
, body
)
384 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
385 body
, pending_chain
);
386 return pending_chain
;
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
405 protect_from_queue (x
, modify
)
409 RTX_CODE code
= GET_CODE (x
);
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain
== 0)
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
424 if (code
== MEM
&& GET_MODE (x
) != BLKmode
425 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
428 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
432 rtx temp
= gen_reg_rtx (GET_MODE (x
));
434 emit_insn_before (gen_move_insn (temp
, new),
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
448 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
449 if (tem
!= XEXP (x
, 0))
455 else if (code
== PLUS
|| code
== MULT
)
457 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
458 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
459 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
471 if (QUEUED_INSN (x
) == 0)
472 return copy_to_reg (QUEUED_VAR (x
));
473 /* If the increment has happened and a pre-increment copy exists,
475 if (QUEUED_COPY (x
) != 0)
476 return QUEUED_COPY (x
);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
482 return QUEUED_COPY (x
);
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
494 enum rtx_code code
= GET_CODE (x
);
500 return queued_subexp_p (XEXP (x
, 0));
504 return (queued_subexp_p (XEXP (x
, 0))
505 || queued_subexp_p (XEXP (x
, 1)));
511 /* Perform all the pending incrementations. */
517 while ((p
= pending_chain
))
519 rtx body
= QUEUED_BODY (p
);
521 switch (GET_CODE (body
))
529 QUEUED_INSN (p
) = body
;
533 #ifdef ENABLE_CHECKING
540 QUEUED_INSN (p
) = emit_insn (body
);
544 pending_chain
= QUEUED_NEXT (p
);
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
554 convert_move (to
, from
, unsignedp
)
558 enum machine_mode to_mode
= GET_MODE (to
);
559 enum machine_mode from_mode
= GET_MODE (from
);
560 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
561 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
567 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
569 to
= protect_from_queue (to
, 1);
570 from
= protect_from_queue (from
, 0);
572 if (to_real
!= from_real
)
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
579 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
581 >= GET_MODE_SIZE (to_mode
))
582 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
583 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
585 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
588 if (to_mode
== from_mode
589 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
591 emit_move_insn (to
, from
);
595 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
597 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
600 if (VECTOR_MODE_P (to_mode
))
601 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
603 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
605 emit_move_insn (to
, from
);
609 if (to_real
!= from_real
)
616 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
618 /* Try converting directly if the insn is supported. */
619 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
622 emit_unop_insn (code
, to
, from
, UNKNOWN
);
627 #ifdef HAVE_trunchfqf2
628 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
630 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
634 #ifdef HAVE_trunctqfqf2
635 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
637 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
641 #ifdef HAVE_truncsfqf2
642 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
644 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
648 #ifdef HAVE_truncdfqf2
649 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
651 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
655 #ifdef HAVE_truncxfqf2
656 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
658 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
662 #ifdef HAVE_trunctfqf2
663 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
665 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
670 #ifdef HAVE_trunctqfhf2
671 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
673 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
677 #ifdef HAVE_truncsfhf2
678 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
680 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
684 #ifdef HAVE_truncdfhf2
685 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
687 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
691 #ifdef HAVE_truncxfhf2
692 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
694 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
698 #ifdef HAVE_trunctfhf2
699 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
701 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
706 #ifdef HAVE_truncsftqf2
707 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
709 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
713 #ifdef HAVE_truncdftqf2
714 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
716 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
720 #ifdef HAVE_truncxftqf2
721 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
723 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
727 #ifdef HAVE_trunctftqf2
728 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
730 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
738 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
745 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
752 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
759 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
766 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
778 libcall
= extendsfdf2_libfunc
;
782 libcall
= extendsfxf2_libfunc
;
786 libcall
= extendsftf2_libfunc
;
798 libcall
= truncdfsf2_libfunc
;
802 libcall
= extenddfxf2_libfunc
;
806 libcall
= extenddftf2_libfunc
;
818 libcall
= truncxfsf2_libfunc
;
822 libcall
= truncxfdf2_libfunc
;
834 libcall
= trunctfsf2_libfunc
;
838 libcall
= trunctfdf2_libfunc
;
850 if (libcall
== (rtx
) 0)
851 /* This conversion is not implemented yet. */
855 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
857 insns
= get_insns ();
859 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
864 /* Now both modes are integers. */
866 /* Handle expanding beyond a word. */
867 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
868 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
875 enum machine_mode lowpart_mode
;
876 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
878 /* Try converting directly if the insn is supported. */
879 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
882 /* If FROM is a SUBREG, put it into a register. Do this
883 so that we always generate the same set of insns for
884 better cse'ing; if an intermediate assignment occurred,
885 we won't be doing the operation directly on the SUBREG. */
886 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
887 from
= force_reg (from_mode
, from
);
888 emit_unop_insn (code
, to
, from
, equiv_code
);
891 /* Next, try converting via full word. */
892 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
893 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
894 != CODE_FOR_nothing
))
896 if (GET_CODE (to
) == REG
)
897 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
898 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
899 emit_unop_insn (code
, to
,
900 gen_lowpart (word_mode
, to
), equiv_code
);
904 /* No special multiword conversion insn; do it by hand. */
907 /* Since we will turn this into a no conflict block, we must ensure
908 that the source does not overlap the target. */
910 if (reg_overlap_mentioned_p (to
, from
))
911 from
= force_reg (from_mode
, from
);
913 /* Get a copy of FROM widened to a word, if necessary. */
914 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
915 lowpart_mode
= word_mode
;
917 lowpart_mode
= from_mode
;
919 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
921 lowpart
= gen_lowpart (lowpart_mode
, to
);
922 emit_move_insn (lowpart
, lowfrom
);
924 /* Compute the value to put in each remaining word. */
926 fill_value
= const0_rtx
;
931 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
932 && STORE_FLAG_VALUE
== -1)
934 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
936 fill_value
= gen_reg_rtx (word_mode
);
937 emit_insn (gen_slt (fill_value
));
943 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
944 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
946 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
950 /* Fill the remaining words. */
951 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
953 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
954 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
959 if (fill_value
!= subword
)
960 emit_move_insn (subword
, fill_value
);
963 insns
= get_insns ();
966 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
967 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
971 /* Truncating multi-word to a word or less. */
972 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
973 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
975 if (!((GET_CODE (from
) == MEM
976 && ! MEM_VOLATILE_P (from
)
977 && direct_load
[(int) to_mode
]
978 && ! mode_dependent_address_p (XEXP (from
, 0)))
979 || GET_CODE (from
) == REG
980 || GET_CODE (from
) == SUBREG
))
981 from
= force_reg (from_mode
, from
);
982 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
986 /* Handle pointer conversion. */ /* SPEE 900220. */
987 if (to_mode
== PQImode
)
989 if (from_mode
!= QImode
)
990 from
= convert_to_mode (QImode
, from
, unsignedp
);
992 #ifdef HAVE_truncqipqi2
993 if (HAVE_truncqipqi2
)
995 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
998 #endif /* HAVE_truncqipqi2 */
1002 if (from_mode
== PQImode
)
1004 if (to_mode
!= QImode
)
1006 from
= convert_to_mode (QImode
, from
, unsignedp
);
1011 #ifdef HAVE_extendpqiqi2
1012 if (HAVE_extendpqiqi2
)
1014 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
1017 #endif /* HAVE_extendpqiqi2 */
1022 if (to_mode
== PSImode
)
1024 if (from_mode
!= SImode
)
1025 from
= convert_to_mode (SImode
, from
, unsignedp
);
1027 #ifdef HAVE_truncsipsi2
1028 if (HAVE_truncsipsi2
)
1030 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
1033 #endif /* HAVE_truncsipsi2 */
1037 if (from_mode
== PSImode
)
1039 if (to_mode
!= SImode
)
1041 from
= convert_to_mode (SImode
, from
, unsignedp
);
1046 #ifdef HAVE_extendpsisi2
1047 if (! unsignedp
&& HAVE_extendpsisi2
)
1049 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
1052 #endif /* HAVE_extendpsisi2 */
1053 #ifdef HAVE_zero_extendpsisi2
1054 if (unsignedp
&& HAVE_zero_extendpsisi2
)
1056 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
1059 #endif /* HAVE_zero_extendpsisi2 */
1064 if (to_mode
== PDImode
)
1066 if (from_mode
!= DImode
)
1067 from
= convert_to_mode (DImode
, from
, unsignedp
);
1069 #ifdef HAVE_truncdipdi2
1070 if (HAVE_truncdipdi2
)
1072 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1075 #endif /* HAVE_truncdipdi2 */
1079 if (from_mode
== PDImode
)
1081 if (to_mode
!= DImode
)
1083 from
= convert_to_mode (DImode
, from
, unsignedp
);
1088 #ifdef HAVE_extendpdidi2
1089 if (HAVE_extendpdidi2
)
1091 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1094 #endif /* HAVE_extendpdidi2 */
1099 /* Now follow all the conversions between integers
1100 no more than a word long. */
1102 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1103 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1105 GET_MODE_BITSIZE (from_mode
)))
1107 if (!((GET_CODE (from
) == MEM
1108 && ! MEM_VOLATILE_P (from
)
1109 && direct_load
[(int) to_mode
]
1110 && ! mode_dependent_address_p (XEXP (from
, 0)))
1111 || GET_CODE (from
) == REG
1112 || GET_CODE (from
) == SUBREG
))
1113 from
= force_reg (from_mode
, from
);
1114 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1115 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1116 from
= copy_to_reg (from
);
1117 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1121 /* Handle extension. */
1122 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1124 /* Convert directly if that works. */
1125 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1126 != CODE_FOR_nothing
)
1129 from
= force_not_mem (from
);
1131 emit_unop_insn (code
, to
, from
, equiv_code
);
1136 enum machine_mode intermediate
;
1140 /* Search for a mode to convert via. */
1141 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1142 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1143 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1144 != CODE_FOR_nothing
)
1145 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1147 GET_MODE_BITSIZE (intermediate
))))
1148 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1149 != CODE_FOR_nothing
))
1151 convert_move (to
, convert_to_mode (intermediate
, from
,
1152 unsignedp
), unsignedp
);
1156 /* No suitable intermediate mode.
1157 Generate what we need with shifts. */
1158 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1159 - GET_MODE_BITSIZE (from_mode
), 0);
1160 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1161 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1163 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1166 emit_move_insn (to
, tmp
);
1171 /* Support special truncate insns for certain modes. */
1173 if (from_mode
== DImode
&& to_mode
== SImode
)
1175 #ifdef HAVE_truncdisi2
1176 if (HAVE_truncdisi2
)
1178 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1182 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1186 if (from_mode
== DImode
&& to_mode
== HImode
)
1188 #ifdef HAVE_truncdihi2
1189 if (HAVE_truncdihi2
)
1191 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1195 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1199 if (from_mode
== DImode
&& to_mode
== QImode
)
1201 #ifdef HAVE_truncdiqi2
1202 if (HAVE_truncdiqi2
)
1204 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1208 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1212 if (from_mode
== SImode
&& to_mode
== HImode
)
1214 #ifdef HAVE_truncsihi2
1215 if (HAVE_truncsihi2
)
1217 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1221 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1225 if (from_mode
== SImode
&& to_mode
== QImode
)
1227 #ifdef HAVE_truncsiqi2
1228 if (HAVE_truncsiqi2
)
1230 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1234 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1238 if (from_mode
== HImode
&& to_mode
== QImode
)
1240 #ifdef HAVE_trunchiqi2
1241 if (HAVE_trunchiqi2
)
1243 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1247 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1251 if (from_mode
== TImode
&& to_mode
== DImode
)
1253 #ifdef HAVE_trunctidi2
1254 if (HAVE_trunctidi2
)
1256 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1260 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1264 if (from_mode
== TImode
&& to_mode
== SImode
)
1266 #ifdef HAVE_trunctisi2
1267 if (HAVE_trunctisi2
)
1269 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1273 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1277 if (from_mode
== TImode
&& to_mode
== HImode
)
1279 #ifdef HAVE_trunctihi2
1280 if (HAVE_trunctihi2
)
1282 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1286 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1290 if (from_mode
== TImode
&& to_mode
== QImode
)
1292 #ifdef HAVE_trunctiqi2
1293 if (HAVE_trunctiqi2
)
1295 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1299 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1303 /* Handle truncation of volatile memrefs, and so on;
1304 the things that couldn't be truncated directly,
1305 and for which there was no special instruction. */
1306 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1308 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1309 emit_move_insn (to
, temp
);
1313 /* Mode combination is not recognized. */
1317 /* Return an rtx for a value that would result
1318 from converting X to mode MODE.
1319 Both X and MODE may be floating, or both integer.
1320 UNSIGNEDP is nonzero if X is an unsigned value.
1321 This can be done by referring to a part of X in place
1322 or by copying to a new temporary with conversion.
1324 This function *must not* call protect_from_queue
1325 except when putting X into an insn (in which case convert_move does it). */
1328 convert_to_mode (mode
, x
, unsignedp
)
1329 enum machine_mode mode
;
1333 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1336 /* Return an rtx for a value that would result
1337 from converting X from mode OLDMODE to mode MODE.
1338 Both modes may be floating, or both integer.
1339 UNSIGNEDP is nonzero if X is an unsigned value.
1341 This can be done by referring to a part of X in place
1342 or by copying to a new temporary with conversion.
1344 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1346 This function *must not* call protect_from_queue
1347 except when putting X into an insn (in which case convert_move does it). */
1350 convert_modes (mode
, oldmode
, x
, unsignedp
)
1351 enum machine_mode mode
, oldmode
;
1357 /* If FROM is a SUBREG that indicates that we have already done at least
1358 the required extension, strip it. */
1360 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1361 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1362 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1363 x
= gen_lowpart (mode
, x
);
1365 if (GET_MODE (x
) != VOIDmode
)
1366 oldmode
= GET_MODE (x
);
1368 if (mode
== oldmode
)
1371 /* There is one case that we must handle specially: If we are converting
1372 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1373 we are to interpret the constant as unsigned, gen_lowpart will do
1374 the wrong if the constant appears negative. What we want to do is
1375 make the high-order word of the constant zero, not all ones. */
1377 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1378 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1379 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1381 HOST_WIDE_INT val
= INTVAL (x
);
1383 if (oldmode
!= VOIDmode
1384 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1386 int width
= GET_MODE_BITSIZE (oldmode
);
1388 /* We need to zero extend VAL. */
1389 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1392 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1395 /* We can do this with a gen_lowpart if both desired and current modes
1396 are integer, and this is either a constant integer, a register, or a
1397 non-volatile MEM. Except for the constant case where MODE is no
1398 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1400 if ((GET_CODE (x
) == CONST_INT
1401 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1402 || (GET_MODE_CLASS (mode
) == MODE_INT
1403 && GET_MODE_CLASS (oldmode
) == MODE_INT
1404 && (GET_CODE (x
) == CONST_DOUBLE
1405 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1406 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1407 && direct_load
[(int) mode
])
1408 || (GET_CODE (x
) == REG
1409 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1410 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1412 /* ?? If we don't know OLDMODE, we have to assume here that
1413 X does not need sign- or zero-extension. This may not be
1414 the case, but it's the best we can do. */
1415 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1416 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1418 HOST_WIDE_INT val
= INTVAL (x
);
1419 int width
= GET_MODE_BITSIZE (oldmode
);
1421 /* We must sign or zero-extend in this case. Start by
1422 zero-extending, then sign extend if we need to. */
1423 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1425 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1426 val
|= (HOST_WIDE_INT
) (-1) << width
;
1428 return gen_int_mode (val
, mode
);
1431 return gen_lowpart (mode
, x
);
1434 temp
= gen_reg_rtx (mode
);
1435 convert_move (temp
, x
, unsignedp
);
1439 /* This macro is used to determine what the largest unit size that
1440 move_by_pieces can use is. */
1442 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1443 move efficiently, as opposed to MOVE_MAX which is the maximum
1444 number of bytes we can move with a single instruction. */
1446 #ifndef MOVE_MAX_PIECES
1447 #define MOVE_MAX_PIECES MOVE_MAX
1450 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1451 store efficiently. Due to internal GCC limitations, this is
1452 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1453 for an immediate constant. */
1455 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1457 /* Generate several move instructions to copy LEN bytes from block FROM to
1458 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1459 and TO through protect_from_queue before calling.
1461 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1462 used to push FROM to the stack.
1464 ALIGN is maximum alignment we can assume. */
1467 move_by_pieces (to
, from
, len
, align
)
1469 unsigned HOST_WIDE_INT len
;
1472 struct move_by_pieces data
;
1473 rtx to_addr
, from_addr
= XEXP (from
, 0);
1474 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1475 enum machine_mode mode
= VOIDmode
, tmode
;
1476 enum insn_code icode
;
1479 data
.from_addr
= from_addr
;
1482 to_addr
= XEXP (to
, 0);
1485 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1486 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1488 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1495 #ifdef STACK_GROWS_DOWNWARD
1501 data
.to_addr
= to_addr
;
1504 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1505 || GET_CODE (from_addr
) == POST_INC
1506 || GET_CODE (from_addr
) == POST_DEC
);
1508 data
.explicit_inc_from
= 0;
1509 data
.explicit_inc_to
= 0;
1510 if (data
.reverse
) data
.offset
= len
;
1513 /* If copying requires more than two move insns,
1514 copy addresses to registers (to make displacements shorter)
1515 and use post-increment if available. */
1516 if (!(data
.autinc_from
&& data
.autinc_to
)
1517 && move_by_pieces_ninsns (len
, align
) > 2)
1519 /* Find the mode of the largest move... */
1520 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1521 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1522 if (GET_MODE_SIZE (tmode
) < max_size
)
1525 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1527 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1528 data
.autinc_from
= 1;
1529 data
.explicit_inc_from
= -1;
1531 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1533 data
.from_addr
= copy_addr_to_reg (from_addr
);
1534 data
.autinc_from
= 1;
1535 data
.explicit_inc_from
= 1;
1537 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1538 data
.from_addr
= copy_addr_to_reg (from_addr
);
1539 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1541 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1543 data
.explicit_inc_to
= -1;
1545 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1547 data
.to_addr
= copy_addr_to_reg (to_addr
);
1549 data
.explicit_inc_to
= 1;
1551 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1552 data
.to_addr
= copy_addr_to_reg (to_addr
);
1555 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1556 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1557 align
= MOVE_MAX
* BITS_PER_UNIT
;
1559 /* First move what we can in the largest integer mode, then go to
1560 successively smaller modes. */
1562 while (max_size
> 1)
1564 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1565 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1566 if (GET_MODE_SIZE (tmode
) < max_size
)
1569 if (mode
== VOIDmode
)
1572 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1573 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1574 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1576 max_size
= GET_MODE_SIZE (mode
);
1579 /* The code above should have handled everything. */
1584 /* Return number of insns required to move L bytes by pieces.
1585 ALIGN (in bits) is maximum alignment we can assume. */
1587 static unsigned HOST_WIDE_INT
1588 move_by_pieces_ninsns (l
, align
)
1589 unsigned HOST_WIDE_INT l
;
1592 unsigned HOST_WIDE_INT n_insns
= 0;
1593 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1595 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1596 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1597 align
= MOVE_MAX
* BITS_PER_UNIT
;
1599 while (max_size
> 1)
1601 enum machine_mode mode
= VOIDmode
, tmode
;
1602 enum insn_code icode
;
1604 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1605 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1606 if (GET_MODE_SIZE (tmode
) < max_size
)
1609 if (mode
== VOIDmode
)
1612 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1613 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1614 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1616 max_size
= GET_MODE_SIZE (mode
);
1624 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1625 with move instructions for mode MODE. GENFUN is the gen_... function
1626 to make a move insn for that mode. DATA has all the other info. */
1629 move_by_pieces_1 (genfun
, mode
, data
)
1630 rtx (*genfun
) PARAMS ((rtx
, ...));
1631 enum machine_mode mode
;
1632 struct move_by_pieces
*data
;
1634 unsigned int size
= GET_MODE_SIZE (mode
);
1635 rtx to1
= NULL_RTX
, from1
;
1637 while (data
->len
>= size
)
1640 data
->offset
-= size
;
1644 if (data
->autinc_to
)
1645 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1648 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1651 if (data
->autinc_from
)
1652 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1655 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1657 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1658 emit_insn (gen_add2_insn (data
->to_addr
,
1659 GEN_INT (-(HOST_WIDE_INT
)size
)));
1660 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1661 emit_insn (gen_add2_insn (data
->from_addr
,
1662 GEN_INT (-(HOST_WIDE_INT
)size
)));
1665 emit_insn ((*genfun
) (to1
, from1
));
1668 #ifdef PUSH_ROUNDING
1669 emit_single_push_insn (mode
, from1
, NULL
);
1675 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1676 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1677 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1678 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1680 if (! data
->reverse
)
1681 data
->offset
+= size
;
1687 /* Emit code to move a block Y to a block X. This may be done with
1688 string-move instructions, with multiple scalar move instructions,
1689 or with a library call.
1691 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1692 SIZE is an rtx that says how long they are.
1693 ALIGN is the maximum alignment we can assume they have.
1694 METHOD describes what kind of copy this is, and what mechanisms may be used.
1696 Return the address of the new block, if memcpy is called and returns it,
1700 emit_block_move (x
, y
, size
, method
)
1702 enum block_op_methods method
;
1710 case BLOCK_OP_NORMAL
:
1711 may_use_call
= true;
1714 case BLOCK_OP_CALL_PARM
:
1715 may_use_call
= block_move_libcall_safe_for_call_parm ();
1717 /* Make inhibit_defer_pop nonzero around the library call
1718 to force it to pop the arguments right away. */
1722 case BLOCK_OP_NO_LIBCALL
:
1723 may_use_call
= false;
1730 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1732 if (GET_MODE (x
) != BLKmode
)
1734 if (GET_MODE (y
) != BLKmode
)
1737 x
= protect_from_queue (x
, 1);
1738 y
= protect_from_queue (y
, 0);
1739 size
= protect_from_queue (size
, 0);
1741 if (GET_CODE (x
) != MEM
)
1743 if (GET_CODE (y
) != MEM
)
1748 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1749 can be incorrect is coming from __builtin_memcpy. */
1750 if (GET_CODE (size
) == CONST_INT
)
1752 x
= shallow_copy_rtx (x
);
1753 y
= shallow_copy_rtx (y
);
1754 set_mem_size (x
, size
);
1755 set_mem_size (y
, size
);
1758 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1759 move_by_pieces (x
, y
, INTVAL (size
), align
);
1760 else if (emit_block_move_via_movstr (x
, y
, size
, align
))
1762 else if (may_use_call
)
1763 retval
= emit_block_move_via_libcall (x
, y
, size
);
1765 emit_block_move_via_loop (x
, y
, size
, align
);
1767 if (method
== BLOCK_OP_CALL_PARM
)
1773 /* A subroutine of emit_block_move. Returns true if calling the
1774 block move libcall will not clobber any parameters which may have
1775 already been placed on the stack. */
1778 block_move_libcall_safe_for_call_parm ()
1784 /* Check to see whether memcpy takes all register arguments. */
1786 takes_regs_uninit
, takes_regs_no
, takes_regs_yes
1787 } takes_regs
= takes_regs_uninit
;
1791 case takes_regs_uninit
:
1793 CUMULATIVE_ARGS args_so_far
;
1796 fn
= emit_block_move_libcall_fn (false);
1797 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0);
1799 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1800 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1802 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1803 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1804 if (!tmp
|| !REG_P (tmp
))
1805 goto fail_takes_regs
;
1806 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1807 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1809 goto fail_takes_regs
;
1811 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1814 takes_regs
= takes_regs_yes
;
1817 case takes_regs_yes
:
1821 takes_regs
= takes_regs_no
;
1832 /* A subroutine of emit_block_move. Expand a movstr pattern;
1833 return true if successful. */
1836 emit_block_move_via_movstr (x
, y
, size
, align
)
1840 /* Try the most limited insn first, because there's no point
1841 including more than one in the machine description unless
1842 the more limited one has some advantage. */
1844 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1845 enum machine_mode mode
;
1847 /* Since this is a move insn, we don't care about volatility. */
1850 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1851 mode
= GET_MODE_WIDER_MODE (mode
))
1853 enum insn_code code
= movstr_optab
[(int) mode
];
1854 insn_operand_predicate_fn pred
;
1856 if (code
!= CODE_FOR_nothing
1857 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1858 here because if SIZE is less than the mode mask, as it is
1859 returned by the macro, it will definitely be less than the
1860 actual mode mask. */
1861 && ((GET_CODE (size
) == CONST_INT
1862 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1863 <= (GET_MODE_MASK (mode
) >> 1)))
1864 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1865 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1866 || (*pred
) (x
, BLKmode
))
1867 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1868 || (*pred
) (y
, BLKmode
))
1869 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1870 || (*pred
) (opalign
, VOIDmode
)))
1873 rtx last
= get_last_insn ();
1876 op2
= convert_to_mode (mode
, size
, 1);
1877 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1878 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1879 op2
= copy_to_mode_reg (mode
, op2
);
1881 /* ??? When called via emit_block_move_for_call, it'd be
1882 nice if there were some way to inform the backend, so
1883 that it doesn't fail the expansion because it thinks
1884 emitting the libcall would be more efficient. */
1886 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1894 delete_insns_since (last
);
1902 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1903 Return the return value from memcpy, 0 otherwise. */
1906 emit_block_move_via_libcall (dst
, src
, size
)
1909 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1910 enum machine_mode size_mode
;
1913 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1915 It is unsafe to save the value generated by protect_from_queue
1916 and reuse it later. Consider what happens if emit_queue is
1917 called before the return value from protect_from_queue is used.
1919 Expansion of the CALL_EXPR below will call emit_queue before
1920 we are finished emitting RTL for argument setup. So if we are
1921 not careful we could get the wrong value for an argument.
1923 To avoid this problem we go ahead and emit code to copy X, Y &
1924 SIZE into new pseudos. We can then place those new pseudos
1925 into an RTL_EXPR and use them later, even after a call to
1928 Note this is not strictly needed for library calls since they
1929 do not call emit_queue before loading their arguments. However,
1930 we may need to have library calls call emit_queue in the future
1931 since failing to do so could cause problems for targets which
1932 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1934 dst
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1935 src
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1937 if (TARGET_MEM_FUNCTIONS
)
1938 size_mode
= TYPE_MODE (sizetype
);
1940 size_mode
= TYPE_MODE (unsigned_type_node
);
1941 size
= convert_to_mode (size_mode
, size
, 1);
1942 size
= copy_to_mode_reg (size_mode
, size
);
1944 /* It is incorrect to use the libcall calling conventions to call
1945 memcpy in this context. This could be a user call to memcpy and
1946 the user may wish to examine the return value from memcpy. For
1947 targets where libcalls and normal calls have different conventions
1948 for returning pointers, we could end up generating incorrect code.
1950 For convenience, we generate the call to bcopy this way as well. */
1952 dst_tree
= make_tree (ptr_type_node
, dst
);
1953 src_tree
= make_tree (ptr_type_node
, src
);
1954 if (TARGET_MEM_FUNCTIONS
)
1955 size_tree
= make_tree (sizetype
, size
);
1957 size_tree
= make_tree (unsigned_type_node
, size
);
1959 fn
= emit_block_move_libcall_fn (true);
1960 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1961 if (TARGET_MEM_FUNCTIONS
)
1963 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1964 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1968 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1969 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1972 /* Now we have to build up the CALL_EXPR itself. */
1973 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1974 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1975 call_expr
, arg_list
, NULL_TREE
);
1976 TREE_SIDE_EFFECTS (call_expr
) = 1;
1978 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1980 /* If we are initializing a readonly value, show the above call
1981 clobbered it. Otherwise, a load from it may erroneously be
1982 hoisted from a loop. */
1983 if (RTX_UNCHANGING_P (dst
))
1984 emit_insn (gen_rtx_CLOBBER (VOIDmode
, dst
));
1986 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
1989 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1990 for the function we use for block copies. The first time FOR_CALL
1991 is true, we call assemble_external. */
1993 static GTY(()) tree block_move_fn
;
1996 emit_block_move_libcall_fn (for_call
)
1999 static bool emitted_extern
;
2000 tree fn
= block_move_fn
, args
;
2004 if (TARGET_MEM_FUNCTIONS
)
2006 fn
= get_identifier ("memcpy");
2007 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2008 const_ptr_type_node
, sizetype
,
2013 fn
= get_identifier ("bcopy");
2014 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
2015 ptr_type_node
, unsigned_type_node
,
2019 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2020 DECL_EXTERNAL (fn
) = 1;
2021 TREE_PUBLIC (fn
) = 1;
2022 DECL_ARTIFICIAL (fn
) = 1;
2023 TREE_NOTHROW (fn
) = 1;
2028 if (for_call
&& !emitted_extern
)
2030 emitted_extern
= true;
2031 make_decl_rtl (fn
, NULL
);
2032 assemble_external (fn
);
2038 /* A subroutine of emit_block_move. Copy the data via an explicit
2039 loop. This is used only when libcalls are forbidden. */
2040 /* ??? It'd be nice to copy in hunks larger than QImode. */
2043 emit_block_move_via_loop (x
, y
, size
, align
)
2045 unsigned int align ATTRIBUTE_UNUSED
;
2047 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
2048 enum machine_mode iter_mode
;
2050 iter_mode
= GET_MODE (size
);
2051 if (iter_mode
== VOIDmode
)
2052 iter_mode
= word_mode
;
2054 top_label
= gen_label_rtx ();
2055 cmp_label
= gen_label_rtx ();
2056 iter
= gen_reg_rtx (iter_mode
);
2058 emit_move_insn (iter
, const0_rtx
);
2060 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
2061 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
2062 do_pending_stack_adjust ();
2064 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
2066 emit_jump (cmp_label
);
2067 emit_label (top_label
);
2069 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
2070 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
2071 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
2072 x
= change_address (x
, QImode
, x_addr
);
2073 y
= change_address (y
, QImode
, y_addr
);
2075 emit_move_insn (x
, y
);
2077 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
2078 true, OPTAB_LIB_WIDEN
);
2080 emit_move_insn (iter
, tmp
);
2082 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
2083 emit_label (cmp_label
);
2085 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
2088 emit_note (NULL
, NOTE_INSN_LOOP_END
);
2091 /* Copy all or part of a value X into registers starting at REGNO.
2092 The number of registers to be filled is NREGS. */
2095 move_block_to_reg (regno
, x
, nregs
, mode
)
2099 enum machine_mode mode
;
2102 #ifdef HAVE_load_multiple
2110 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
2111 x
= validize_mem (force_const_mem (mode
, x
));
2113 /* See if the machine can do this with a load multiple insn. */
2114 #ifdef HAVE_load_multiple
2115 if (HAVE_load_multiple
)
2117 last
= get_last_insn ();
2118 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
2126 delete_insns_since (last
);
2130 for (i
= 0; i
< nregs
; i
++)
2131 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
2132 operand_subword_force (x
, i
, mode
));
2135 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2136 The number of registers to be filled is NREGS. SIZE indicates the number
2137 of bytes in the object X. */
2140 move_block_from_reg (regno
, x
, nregs
, size
)
2147 #ifdef HAVE_store_multiple
2151 enum machine_mode mode
;
2156 /* If SIZE is that of a mode no bigger than a word, just use that
2157 mode's store operation. */
2158 if (size
<= UNITS_PER_WORD
2159 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
2161 emit_move_insn (adjust_address (x
, mode
, 0), gen_rtx_REG (mode
, regno
));
2165 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2166 to the left before storing to memory. Note that the previous test
2167 doesn't handle all cases (e.g. SIZE == 3). */
2168 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
2170 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
2176 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
2177 gen_rtx_REG (word_mode
, regno
),
2178 build_int_2 ((UNITS_PER_WORD
- size
)
2179 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
2180 emit_move_insn (tem
, shift
);
2184 /* See if the machine can do this with a store multiple insn. */
2185 #ifdef HAVE_store_multiple
2186 if (HAVE_store_multiple
)
2188 last
= get_last_insn ();
2189 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
2197 delete_insns_since (last
);
2201 for (i
= 0; i
< nregs
; i
++)
2203 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
2208 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
2212 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2213 ORIG, where ORIG is a non-consecutive group of registers represented by
2214 a PARALLEL. The clone is identical to the original except in that the
2215 original set of registers is replaced by a new set of pseudo registers.
2216 The new set has the same modes as the original set. */
2219 gen_group_rtx (orig
)
2225 if (GET_CODE (orig
) != PARALLEL
)
2228 length
= XVECLEN (orig
, 0);
2229 tmps
= (rtx
*) alloca (sizeof (rtx
) * length
);
2231 /* Skip a NULL entry in first slot. */
2232 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
2237 for (; i
< length
; i
++)
2239 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
2240 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
2242 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
2245 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
2248 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2249 registers represented by a PARALLEL. SSIZE represents the total size of
2250 block SRC in bytes, or -1 if not known. */
2251 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2252 the balance will be in what would be the low-order memory addresses, i.e.
2253 left justified for big endian, right justified for little endian. This
2254 happens to be true for the targets currently using this support. If this
2255 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2259 emit_group_load (dst
, orig_src
, ssize
)
2266 if (GET_CODE (dst
) != PARALLEL
)
2269 /* Check for a NULL entry, used to indicate that the parameter goes
2270 both on the stack and in registers. */
2271 if (XEXP (XVECEXP (dst
, 0, 0), 0))
2276 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
        }
      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }
      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, ssize);
            }
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
          else
            abort ();
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();
  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (dst, src)
     rtx dst, src;
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();
  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }
  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }
      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            abort ();
        }
      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  emit_queue ();
  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
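
/* Sketch of the usual round trip through such a group (GROUP, MEM_SRC,
   and MEM_DST are hypothetical; the MEMs are 16-byte BLKmode blocks):
   load the pieces into the group's registers, then store them into the
   destination block.  */
#if 0
  emit_group_load (group, mem_src, 16);   /* registers <- MEM_SRC */
  emit_group_store (mem_dst, group, 16);  /* MEM_DST <- registers */
#endif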
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }
  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
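
/* Sketch: recording that a call reads its first two argument registers;
   the resulting list would be attached to the CALL_INSN.  The register
   numbers are hypothetical.  */
#if 0
  rtx call_fusage = NULL_RTX;

  use_regs (&call_fusage, 0, 2);  /* USEs for hard regs 0 and 1.  */
#endif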
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
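
/* A sketch of a CONSTFUN callback in the style of clear_by_pieces_1
   below: it replicates a single byte across MODE, assuming MODE is no
   wider than HOST_WIDE_INT (true of the integer modes tried on common
   hosts).  EXAMPLE_FILL_BYTE and the caller fragment are hypothetical,
   not part of GCC.  */
#if 0
static rtx
example_fill_byte (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  unsigned HOST_WIDE_INT c = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  /* Build the repeated-byte pattern for this mode.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | c;

  return gen_int_mode (val, mode);
}

  /* Caller side: check first, then emit the stores.  */
  unsigned char byte = 0xff;
  if (can_store_by_pieces (len, example_fill_byte, (PTR) &byte, align))
    store_by_pieces (to, len, example_fill_byte, (PTR) &byte, align);
#endif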
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (object, size, align)
     rtx object, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (object, size)
     rtx object, size;
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

static tree
clear_storage_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;
  tree fn = block_clear_fn, args;

  if (!fn)
    {
      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memset");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           integer_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bzero");
          args = build_function_type_list (void_type_node, ptr_type_node,
                                           unsigned_type_node, NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);
    }

  return fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y_cst = y;
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
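
/* Sketch: the common pattern of loading a constant through this routine.
   Were 42 not a legitimate constant on the target, the code above would
   spill it to the constant pool and tag the move with a REG_EQUAL note.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
#endif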
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = GET_MODE_INNER (mode))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
      if (stack
          && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
              != GET_MODE_SIZE (submode)))
        {
          rtx temp;
          HOST_WIDE_INT offset1, offset2;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
          offset1 = 0;
          offset2 = GET_MODE_SIZE (submode);
#else
          offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
          offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
                     + GET_MODE_SIZE (submode));
#endif

          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset1))),
                          gen_realpart (submode, y));
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset2))),
                          gen_imagpart (submode, y));
        }
      else
#endif
      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          rtx realpart_x, realpart_y;
          rtx imagpart_x, imagpart_y;

          /* If this is a complex value with each part being smaller than a
             word, the usual calling sequence will likely pack the pieces into
             a single register.  Unfortunately, SUBREG of hard registers only
             deals in terms of words, so we have a problem converting input
             arguments to the CONCAT of two registers that is used elsewhere
             for complex values.  If this is before reload, we can copy it into
             memory and reload.  FIXME, we should see about using extract and
             insert on integer registers, but complex short and complex char
             variables should be rarely used.  */
          if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
              && (reload_in_progress | reload_completed) == 0)
            {
              int packed_dest_p
                = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
              int packed_src_p
                = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

              if (packed_dest_p || packed_src_p)
                {
                  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                               ? MODE_FLOAT : MODE_INT);

                  enum machine_mode reg_mode
                    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

                  if (reg_mode != BLKmode)
                    {
                      rtx mem = assign_stack_temp (reg_mode,
                                                   GET_MODE_SIZE (mode), 0);
                      rtx cmem = adjust_address (mem, mode, 0);

                      cfun->cannot_inline
                        = N_("function using short complex types cannot be inline");

                      if (packed_dest_p)
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

                          emit_move_insn_1 (cmem, y);
                          return emit_move_insn_1 (sreg, mem);
                        }
                      else
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

                          emit_move_insn_1 (mem, sreg);
                          return emit_move_insn_1 (x, cmem);
                        }
                    }
                }
            }

          realpart_x = gen_realpart (submode, x);
          realpart_y = gen_realpart (submode, y);
          imagpart_x = gen_imagpart (submode, x);
          imagpart_y = gen_imagpart (submode, y);

          /* Show the output dies here.  This is necessary for SUBREGs
             of pseudos since we cannot track their lifetimes correctly;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed)
              && (GET_CODE (realpart_x) == SUBREG
                  || GET_CODE (imagpart_x) == SUBREG))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (realpart_x, realpart_y));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (imagpart_x, imagpart_y));
        }

      return get_last_insn ();
    }
  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          rtx temp;
          enum rtx_code code;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

          code = GET_CODE (XEXP (x, 0));

          /* Just hope that small offsets off SP are OK.  */
          if (code == POST_INC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (-((HOST_WIDE_INT)
                                            GET_MODE_SIZE (GET_MODE (x)))));
          else if (code == POST_DEC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          else
            temp = stack_pointer_rtx;

          x = change_address (x, VOIDmode, temp);
        }
#endif

      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = get_insns ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
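
/* Sketch: on a target with no movti pattern, a TImode register copy
   falls through to the multi-word loop above and is emitted as several
   word_mode moves of operand_subword pieces.  Hypothetical call; TImode
   may not exist on every target.  */
#if 0
  emit_move_insn_1 (gen_reg_rtx (TImode), gen_reg_rtx (TImode));
#endif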
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (x, y)
     rtx x, y;
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
        REG_NOTES (last_insn)
          = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
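
/* Sketch: reserving 64 bytes of argument space with no extra padding and
   taking the address of its base.  */
#if 0
  rtx base = push_block (GEN_INT (64), 0, 0);
#endif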
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
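
/* Sketch: with STACK_PUSH_CODE == PRE_DEC and no "push" pattern, the
   move emitted above amounts to
     (set (mem:SI (pre_dec:P (reg sp))) (reg x))
   e.g. for a hypothetical direct call: */
#if 0
  emit_single_push_insn (SImode, force_reg (SImode, GEN_INT (1)), NULL_TREE);
#endif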
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space,
                alignment_pad)
     rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;
#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          if (type != 0)
            {
              set_mem_attributes (target, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (target, 0);
            }

          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                        args_so_far));
          dest = gen_rtx_MEM (mode, addr);
          if (type != 0)
            {
              set_mem_attributes (dest, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (dest, 0);
            }

          emit_move_insn (dest, x);
        }
    }
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, -1);  /* ??? size?  */
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

          if (GET_CODE (to_rtx) != MEM)
            abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (GET_CODE (to_rtx) == MEM
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_type (TREE_TYPE (to),
                                                                 offset));
        }

      if (GET_CODE (to_rtx) == MEM)
        {
          /* If the field is at offset zero, we could have been given the
             DECL_RTX of the parent struct.  Don't munge it.  */
          to_rtx = shallow_copy_rtx (to_rtx);

          set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
        }
      /* Deal with volatile and readonly fields.  The former is only done
         for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && GET_CODE (to_rtx) == MEM)
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_VOLATILE_P (to_rtx) = 1;
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1)))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
        }
      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast for HPUX compiler.  */
                             ? ((enum machine_mode)
                                TYPE_MODE (TREE_TYPE (to)))
                             : VOIDmode),
                            unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (POINTER_TYPE_P (TREE_TYPE (to))
              && GET_MODE (to_rtx) != GET_MODE (value))
            value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      if (TARGET_MEM_FUNCTIONS)
        emit_library_call (memmove_libfunc, LCT_NORMAL,
                           VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                           XEXP (from_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (sizetype),
                                            size, TREE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));
      else
        emit_library_call (bcopy_libfunc, LCT_NORMAL,
                           VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                           XEXP (to_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (integer_type_node),
                                            size,
                                            TREE_UNSIGNED (integer_type_node)),
                           TYPE_MODE (integer_type_node));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more rigorous?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */
rtx
store_expr (exp, target, want_value)
     tree exp;
     rtx target;
     int want_value;
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target), 0);
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
        dont_return_target = 1;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        {
          /* If TEMP is already in the desired TARGET, only copy it from
             memory and don't store it there again.  */
          if (temp == target
              || (rtx_equal_p (temp, target)
                  && ! side_effects_p (temp) && ! side_effects_p (target)))
            dont_store_target = 1;
          temp = copy_to_reg (temp);
        }
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0)
        {
          if (TREE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp = convert
              ((*lang_hooks.types.signed_or_unsigned_type)
               (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

          exp = convert ((*lang_hooks.types.type_for_mode)
                         (GET_MODE (SUBREG_REG (target)),
                          SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode, 0);

      /* If TEMP is a MEM and we want a result value, make the access
         now so it gets done only once.  Strictly speaking, this is
         only necessary if the MEM is volatile, or if the address
         overlaps TARGET.  But not performing the load twice also
         reduces the amount of rtl we generate and then have to CSE.  */
      if (GET_CODE (temp) == MEM && want_value)
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
         target.  Otherwise, the caller might get confused by a result whose
         mode is larger than expected.  */

      if (want_value && GET_MODE (temp) != GET_MODE (target))
        {
          if (GET_MODE (temp) != VOIDmode)
            {
              temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (temp,
                SUBREG_PROMOTED_UNSIGNED_P (target));
            }
          else
            temp = convert_modes (GET_MODE (target),
                                  GET_MODE (SUBREG_REG (target)),
                                  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || want_value))
        dont_return_target = 1;
    }
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TREE_UNSIGNED (TREE_TYPE (exp)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
         /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
            but TARGET is not a valid memory reference, TEMP will differ
            from TARGET although it is really the same location.  */
      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
          || target != DECL_RTL_IF_SET (exp))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
         unless necessary, because the expr_size hook of some front ends
         (C++) aborts on objects that are not supposed to be bit-copied
         or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
                                               VOIDmode, 0);
              rtx label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
              emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */
              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
                  if (GET_MODE (copy_size_rtx) != Pmode)
                    copy_size_rtx = convert_memory_address (Pmode,
                                                            copy_size_rtx);
#endif

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size);

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
      else
        emit_move_insn (target, temp);
    }
  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
        is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
           elt = TREE_CHAIN (elt))
        if (!is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        if (! is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        {
          /* If there are no ranges of true bits, it is all zero.  */
          return elt == NULL_TREE;
        }
      for (; elt; elt = TREE_CHAIN (elt))
        {
          /* We do not handle the case where the index is a RANGE_EXPR,
             so the statistic will be somewhat inaccurate.
             We do make a more accurate count in store_constructor itself,
             so since this function is only used for nested array elements,
             this should be close enough.  */
          if (mostly_zeros_p (TREE_VALUE (elt)))
            zeros++;

          elts++;
        }

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
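/* Worked example (illustrative): for the initializer { 0, 0, 0, 5 },
   the loop above counts zeros == 3 and elts == 4, and 4 * 3 >= 3 * 4
   holds, so the constructor counts as mostly zeros; store_constructor
   will then prefer to clear the whole object first and store only the
   single nonzero element.  */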
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
                         alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a nonzero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (GET_CODE (target) == MEM)
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
                 alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (exp, target, cleared, size)
     tree exp;
     rtx target;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* We either clear the aggregate or indicate the value is dead.  */
      if ((TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && ! cleared
          && ! CONSTRUCTOR_ELTS (exp))
        /* If the constructor is empty, clear the union.  */
        {
          clear_storage (target, expr_size (exp));
          cleared = 1;
        }

      /* If we are building a static constructor into a register,
         set the initial value as zero so we can fold the value into
         a constant.  But if more than one register is involved,
         this probably loses.  */
      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
               && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
        {
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
          cleared = 1;
        }

      /* If the constructor has fewer fields than the structure
         or if we are initializing the structure to mostly zeros,
         clear the whole structure first.  Don't do this if TARGET is a
         register whose mode size isn't equal to SIZE since clear_storage
         can't handle this case.  */
      else if (! cleared && size > 0
               && ((list_length (CONSTRUCTOR_ELTS (exp))
                    != fields_length (type))
                   || mostly_zeros_p (exp))
               && (GET_CODE (target) != REG
                   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
                       == size)))
        {
          clear_storage (target, GEN_INT (size));
          cleared = 1;
        }

      if (! cleared)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
      /* Store each element of the constructor into
         the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        {
          tree field = TREE_PURPOSE (elt);
          tree value = TREE_VALUE (elt);
          enum machine_mode mode;
          HOST_WIDE_INT bitsize;
          HOST_WIDE_INT bitpos = 0;
          tree offset;
          rtx to_rtx = target;

          /* Just ignore missing fields.
             We cleared the whole structure, above,
             if any fields are missing.  */
          if (field == 0)
            continue;

          if (cleared && is_zeros_p (value))
            continue;

          if (host_integerp (DECL_SIZE (field), 1))
            bitsize = tree_low_cst (DECL_SIZE (field), 1);
          else
            bitsize = -1;

          mode = DECL_MODE (field);
          if (DECL_BIT_FIELD (field))
            mode = VOIDmode;

          offset = DECL_FIELD_OFFSET (field);
          if (host_integerp (offset, 0)
              && host_integerp (bit_position (field), 0))
            {
              bitpos = int_bit_position (field);
              offset = 0;
            }
          else
            bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

          if (offset)
            {
              rtx offset_rtx;

              if (contains_placeholder_p (offset))
                offset = build (WITH_RECORD_EXPR, sizetype,
                                offset, make_tree (TREE_TYPE (exp), target));

              offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
              if (GET_CODE (to_rtx) != MEM)
                abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (offset_rtx) != Pmode)
                offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
              if (GET_MODE (offset_rtx) != ptr_mode)
                offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

              to_rtx = offset_address (to_rtx, offset_rtx,
                                       highest_pow2_factor (offset));
            }

          if (TREE_READONLY (field))
            {
              if (GET_CODE (to_rtx) == MEM)
                to_rtx = copy_rtx (to_rtx);

              RTX_UNCHANGING_P (to_rtx) = 1;
            }

#ifdef WORD_REGISTER_OPERATIONS
          /* If this initializes a field that is smaller than a word, at the
             start of a word, try to widen it to a full word.
             This special case allows us to output C++ member function
             initializations in a form that the optimizers can understand.  */
          if (GET_CODE (target) == REG
              && bitsize < BITS_PER_WORD
              && bitpos % BITS_PER_WORD == 0
              && GET_MODE_CLASS (mode) == MODE_INT
              && TREE_CODE (value) == INTEGER_CST
              && exp_size >= 0
              && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
            {
              tree type = TREE_TYPE (value);

              if (TYPE_PRECISION (type) < BITS_PER_WORD)
                {
                  type = (*lang_hooks.types.type_for_size)
                    (BITS_PER_WORD, TREE_UNSIGNED (type));
                  value = convert (type, value);
                }

              if (BYTES_BIG_ENDIAN)
                value
                  = fold (build (LSHIFT_EXPR, type, value,
                                 build_int_2 (BITS_PER_WORD - bitsize, 0)));
              bitsize = BITS_PER_WORD;
              mode = word_mode;
            }
#endif

          if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
              && DECL_NONADDRESSABLE_P (field))
            {
              to_rtx = copy_rtx (to_rtx);
              MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
            }

          store_constructor_field (to_rtx, bitsize, bitpos, mode,
                                   value, type, cleared,
                                   get_alias_set (TREE_TYPE (field)));
        }
    }
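  /* Worked example for the WORD_REGISTER_OPERATIONS widening in the record
     branch above (illustrative): storing the 8-bit constant 0x12 at bit
     position 0 of a register on a 32-bit big-endian target rewrites the
     value as 0x12 << (32 - 8) == 0x12000000 and widens BITSIZE to a full
     word, so the optimizers see a plain word store instead of a bit-field
     insertion.  */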
  else if (TREE_CODE (type) == ARRAY_TYPE
           || TREE_CODE (type) == VECTOR_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      tree elttype = TREE_TYPE (type);
      int const_bounds_p;
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;

      /* Vectors are like arrays, but the domain is stored via an array
         type indirectly.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
        {
          /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
             the same field as TYPE_DOMAIN, we are not guaranteed that
             it always will.  */
          domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
          domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
        }

      const_bounds_p = (TYPE_MIN_VALUE (domain)
                        && TYPE_MAX_VALUE (domain)
                        && host_integerp (TYPE_MIN_VALUE (domain), 0)
                        && host_integerp (TYPE_MAX_VALUE (domain), 0));

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
        {
          minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
          maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
        }

      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is
         static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
        need_to_clear = 1;
      else
        {
          HOST_WIDE_INT count = 0, zero_count = 0;
          need_to_clear = ! const_bounds_p;

          /* This loop is a more accurate version of the loop in
             mostly_zeros_p (it handles RANGE_EXPR in an index).
             It is also needed to check for missing elements.  */
          for (elt = CONSTRUCTOR_ELTS (exp);
               elt != NULL_TREE && ! need_to_clear;
               elt = TREE_CHAIN (elt))
            {
              tree index = TREE_PURPOSE (elt);
              HOST_WIDE_INT this_node_count;

              if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                {
                  tree lo_index = TREE_OPERAND (index, 0);
                  tree hi_index = TREE_OPERAND (index, 1);

                  if (! host_integerp (lo_index, 1)
                      || ! host_integerp (hi_index, 1))
                    {
                      need_to_clear = 1;
                      break;
                    }

                  this_node_count = (tree_low_cst (hi_index, 1)
                                     - tree_low_cst (lo_index, 1) + 1);
                }
              else
                this_node_count = 1;

              count += this_node_count;
              if (mostly_zeros_p (TREE_VALUE (elt)))
                zero_count += this_node_count;
            }

          /* Clear the entire array first if there are any missing elements,
             or if the incidence of zero elements is >= 75%.  */
          if (! need_to_clear
              && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
            need_to_clear = 1;
        }

      if (need_to_clear && size > 0)
        {
          if (! cleared)
            {
              if (REG_P (target))
                emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
              else
                clear_storage (target, GEN_INT (size));
            }
          cleared = 1;
        }
      else if (REG_P (target))
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
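      /* Example of the clearing heuristic above (illustrative): a
         100-element array initializer in which 80 elements are mostly
         zero gives 4 * 80 >= 3 * 100, so the whole array is cleared
         first and only the remaining elements are stored one by one.  */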
      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
           elt;
           elt = TREE_CHAIN (elt), i++)
        {
          enum machine_mode mode;
          HOST_WIDE_INT bitsize;
          HOST_WIDE_INT bitpos;
          int unsignedp;
          tree value = TREE_VALUE (elt);
          tree index = TREE_PURPOSE (elt);
          rtx xtarget = target;

          if (cleared && is_zeros_p (value))
            continue;

          unsignedp = TREE_UNSIGNED (elttype);
          mode = TYPE_MODE (elttype);
          if (mode == BLKmode)
            bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
                       ? tree_low_cst (TYPE_SIZE (elttype), 1)
                       : -1);
          else
            bitsize = GET_MODE_BITSIZE (mode);

          if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
            {
              tree lo_index = TREE_OPERAND (index, 0);
              tree hi_index = TREE_OPERAND (index, 1);
              rtx index_r, pos_rtx, loop_end;
              struct nesting *loop;
              HOST_WIDE_INT lo, hi, count;
              tree position;

              /* If the range is constant and "small", unroll the loop.  */
              if (const_bounds_p
                  && host_integerp (lo_index, 0)
                  && host_integerp (hi_index, 0)
                  && (lo = tree_low_cst (lo_index, 0),
                      hi = tree_low_cst (hi_index, 0),
                      count = hi - lo + 1,
                      (GET_CODE (target) != MEM
                       || count <= 2
                       || (host_integerp (TYPE_SIZE (elttype), 1)
                           && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
                               <= 40 * 8)))))
                {
                  lo -= minelt;  hi -= minelt;
                  for (; lo <= hi; lo++)
                    {
                      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

                      if (GET_CODE (target) == MEM
                          && !MEM_KEEP_ALIAS_SET_P (target)
                          && TREE_CODE (type) == ARRAY_TYPE
                          && TYPE_NONALIASED_COMPONENT (type))
                        {
                          target = copy_rtx (target);
                          MEM_KEEP_ALIAS_SET_P (target) = 1;
                        }

                      store_constructor_field
                        (target, bitsize, bitpos, mode, value, type, cleared,
                         get_alias_set (elttype));
                    }
                }
              else
                {
                  expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
                  loop_end = gen_label_rtx ();

                  unsignedp = TREE_UNSIGNED (domain);

                  index = build_decl (VAR_DECL, NULL_TREE, domain);

                  index_r
                    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                 &unsignedp, 0));
                  SET_DECL_RTL (index, index_r);
                  if (TREE_CODE (value) == SAVE_EXPR
                      && SAVE_EXPR_RTL (value) == 0)
                    {
                      /* Make sure value gets expanded once before the
                         loop.  */
                      expand_expr (value, const0_rtx, VOIDmode, 0);
                      emit_queue ();
                    }
                  store_expr (lo_index, index_r, 0);
                  loop = expand_start_loop (0);

                  /* Assign value to element index.  */
                  position
                    = convert (ssizetype,
                               fold (build (MINUS_EXPR, TREE_TYPE (index),
                                            index, TYPE_MIN_VALUE (domain))));
                  position = size_binop (MULT_EXPR, position,
                                         convert (ssizetype,
                                                  TYPE_SIZE_UNIT (elttype)));

                  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
                  xtarget = offset_address (target, pos_rtx,
                                            highest_pow2_factor (position));
                  xtarget = adjust_address (xtarget, mode, 0);
                  if (TREE_CODE (value) == CONSTRUCTOR)
                    store_constructor (value, xtarget, cleared,
                                       bitsize / BITS_PER_UNIT);
                  else
                    store_expr (value, xtarget, 0);

                  expand_exit_loop_if_false (loop,
                                             build (LT_EXPR, integer_type_node,
                                                    index, hi_index));

                  expand_increment (build (PREINCREMENT_EXPR,
                                           TREE_TYPE (index),
                                           index, integer_one_node), 0, 0);
                  expand_end_loop ();
                  emit_label (loop_end);
                }
            }
          else if ((index != 0 && ! host_integerp (index, 0))
                   || ! host_integerp (TYPE_SIZE (elttype), 1))
            {
              tree position;

              if (index == 0)
                index = ssize_int (1);

              if (minelt)
                index = convert (ssizetype,
                                 fold (build (MINUS_EXPR, index,
                                              TYPE_MIN_VALUE (domain))));

              position = size_binop (MULT_EXPR, index,
                                     convert (ssizetype,
                                              TYPE_SIZE_UNIT (elttype)));
              xtarget = offset_address (target,
                                        expand_expr (position, 0, VOIDmode, 0),
                                        highest_pow2_factor (position));
              xtarget = adjust_address (xtarget, mode, 0);
              store_expr (value, xtarget, 0);
            }
          else
            {
              if (index != 0)
                bitpos = ((tree_low_cst (index, 0) - minelt)
                          * tree_low_cst (TYPE_SIZE (elttype), 1));
              else
                bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

              if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
                  && TREE_CODE (type) == ARRAY_TYPE
                  && TYPE_NONALIASED_COMPONENT (type))
                {
                  target = copy_rtx (target);
                  MEM_KEEP_ALIAS_SET_P (target) = 1;
                }

              store_constructor_field (target, bitsize, bitpos, mode, value,
                                       type, cleared, get_alias_set (elttype));
            }
        }
    }
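  /* Note on the RANGE_EXPR paths above (illustrative): an initializer
     such as "int a[8] = { [2 ... 5] = 7 };" has a constant, small range
     (4 elements * 32 bits <= 40 * 8 bits), so it is unrolled into four
     separate stores; larger or non-constant ranges instead go through
     the runtime loop built with expand_start_loop.  */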
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the set (using bzero/memset), and then
         set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
        {
          if (!cleared)
            clear_storage (target, GEN_INT (size));
          return;
        }

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
                              size_diffop (domain_max, domain_min),
                              ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
         are "complicated" (more than one range), initialize (the
         constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
          || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
        {
          unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
          enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
          char *bit_buffer = (char *) alloca (nbits);
          HOST_WIDE_INT word = 0;
          unsigned int bit_pos = 0;
          unsigned int ibit = 0;
          unsigned int offset = 0;  /* In bytes from beginning of set.  */

          elt = get_set_constructor_bits (exp, bit_buffer, nbits);
          for (;;)
            {
              if (bit_buffer[ibit])
                {
                  if (BYTES_BIG_ENDIAN)
                    word |= (1 << (set_word_size - 1 - bit_pos));
                  else
                    word |= 1 << bit_pos;
                }

              bit_pos++;  ibit++;
              if (bit_pos >= set_word_size || ibit == nbits)
                {
                  if (word != 0 || ! cleared)
                    {
                      rtx datum = GEN_INT (word);
                      rtx to_rtx;

                      /* The assumption here is that it is safe to use
                         XEXP if the set is multi-word, but not if
                         it's single-word.  */
                      if (GET_CODE (target) == MEM)
                        to_rtx = adjust_address (target, mode, offset);
                      else if (offset == 0)
                        to_rtx = target;
                      else
                        abort ();
                      emit_move_insn (to_rtx, datum);
                    }

                  if (ibit == nbits)
                    break;
                  word = 0;
                  bit_pos = 0;
                  offset += set_word_size / BITS_PER_UNIT;
                }
            }
        }
      else if (!cleared)
        /* Don't bother clearing storage if the set is all ones.  */
        if (TREE_CHAIN (elt) != NULL_TREE
            || (TREE_PURPOSE (elt) == NULL_TREE
                ? nbits != 1
                : ( ! host_integerp (TREE_VALUE (elt), 0)
                   || ! host_integerp (TREE_PURPOSE (elt), 0)
                   || (tree_low_cst (TREE_VALUE (elt), 0)
                       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
                       != (HOST_WIDE_INT) nbits))))
          clear_storage (target, expr_size (exp));

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
        {
          /* Start of range of element or NULL.  */
          tree startbit = TREE_PURPOSE (elt);
          /* End of range of element, or element value.  */
          tree endbit = TREE_VALUE (elt);
          HOST_WIDE_INT startb, endb;
          rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

          bitlength_rtx = expand_expr (bitlength,
                                       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

          /* Handle non-range tuple element like [ expr ].  */
          if (startbit == NULL_TREE)
            {
              startbit = save_expr (endbit);
              endbit = startbit;
            }

          startbit = convert (sizetype, startbit);
          endbit = convert (sizetype, endbit);
          if (! integer_zerop (domain_min))
            {
              startbit = size_binop (MINUS_EXPR, startbit, domain_min);
              endbit = size_binop (MINUS_EXPR, endbit, domain_min);
            }
          startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
                                      EXPAND_CONST_ADDRESS);
          endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
                                    EXPAND_CONST_ADDRESS);

          if (GET_CODE (target) == REG)
            {
              targetx
                = assign_temp
                  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
                                          (GET_MODE (target), 0),
                                          TYPE_QUAL_CONST)),
                   0, 1, 1);
              emit_move_insn (targetx, target);
            }

          else if (GET_CODE (target) == MEM)
            targetx = target;
          else
            abort ();

          /* Optimization:  If startbit and endbit are constants divisible
             by BITS_PER_UNIT, call memset instead.  */
          if (TARGET_MEM_FUNCTIONS
              && TREE_CODE (startbit) == INTEGER_CST
              && TREE_CODE (endbit) == INTEGER_CST
              && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
              && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
            {
              emit_library_call (memset_libfunc, LCT_NORMAL,
                                 VOIDmode, 3,
                                 plus_constant (XEXP (targetx, 0),
                                                startb / BITS_PER_UNIT),
                                 Pmode,
                                 constm1_rtx, TYPE_MODE (integer_type_node),
                                 GEN_INT ((endb - startb) / BITS_PER_UNIT),
                                 TYPE_MODE (sizetype));
            }
          else
            emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
                               LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
                               Pmode, bitlength_rtx, TYPE_MODE (sizetype),
                               startbit_rtx, TYPE_MODE (sizetype),
                               endbit_rtx, TYPE_MODE (sizetype));

          if (GET_CODE (target) == REG)
            emit_move_insn (target, targetx);
        }
    }

  else
    abort ();
}
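/* Background note (illustrative): SET_TYPE constructors come from front
   ends with Pascal-style set types.  A set over a domain such as 0..31
   fits in one word, so its constant part is accumulated in WORD above
   and stored with a single emit_move_insn; only the remaining
   non-constant ranges fall back to the "__setbits" library call.  */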
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
             alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     tree type;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object
        = assign_temp
          (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
           0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
                   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
        abort ();
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
          && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
              || bitpos % GET_MODE_ALIGNMENT (mode)))
      /* If the RHS and field are a constant size and the size of the
         RHS isn't the same size as the bitfield, we must use bitfield
         operations.  */
      || (bitsize >= 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
          && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
                                       - bitsize),
                             temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
         must be in memory and BITPOS must be aligned on a byte
         boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
        {
          if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
              || bitpos % BITS_PER_UNIT != 0)
            abort ();

          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           BLOCK_OP_NORMAL);

          return value_mode == VOIDmode ? const0_rtx : target;
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp,
                       int_size_in_bytes (type));

      if (value_mode != VOIDmode)
        {
          /* The caller wants an rtx for the value.
             If possible, avoid refetching from the bitfield itself.  */
          if (width_mask != 0
              && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
            {
              tree count;
              enum machine_mode tmode;

              tmode = GET_MODE (temp);
              if (tmode == VOIDmode)
                tmode = value_mode;

              if (unsignedp)
                return expand_and (tmode, temp,
                                   gen_int_mode (width_mask, tmode),
                                   NULL_RTX);

              count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
              temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
              return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
            }

          return extract_bit_field (target, bitsize, bitpos, unsignedp,
                                    NULL_RTX, value_mode, VOIDmode,
                                    int_size_in_bytes (type));
        }

      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;

      /* If a value is wanted, it must be the lhs;
         so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
          && ! CONSTANT_ADDRESS_P (addr)
          /* A frame-pointer reference is already stable.  */
          && ! (GET_CODE (addr) == PLUS
                && GET_CODE (XEXP (addr, 1)) == CONST_INT
                && (XEXP (addr, 0) == virtual_incoming_args_rtx
                    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
        to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));

      /* Now build a reference to just the desired component.  */

      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
        to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
        set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
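/* Usage sketch (illustrative): for "struct { int f : 5; } x; x.f = v;"
   the store reaches store_field with MODE == VOIDmode and BITSIZE == 5,
   taking the store_bit_field path above; WIDTH_MASK == 31 then lets the
   stored value be recovered by masking or a shift pair instead of
   re-reading the bit-field when the caller wants the value back.  */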
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
                     punsignedp, pvolatilep)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree placeholder_ptr = 0;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
        mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
        *pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
        bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (exp, 1);
          tree this_offset = DECL_FIELD_OFFSET (field);

          /* If this field hasn't been filled in yet, don't go
             past it.  This should only happen when folding expressions
             made during type construction.  */
          if (this_offset == 0)
            break;
          else if (! TREE_CONSTANT (this_offset)
                   && contains_placeholder_p (this_offset))
            this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);

          offset = size_binop (PLUS_EXPR, offset, this_offset);
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                   DECL_FIELD_BIT_OFFSET (field));

          /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
        }

      else if (TREE_CODE (exp) == ARRAY_REF
               || TREE_CODE (exp) == ARRAY_RANGE_REF)
        {
          tree index = TREE_OPERAND (exp, 1);
          tree array = TREE_OPERAND (exp, 0);
          tree domain = TYPE_DOMAIN (TREE_TYPE (array));
          tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

          /* We assume all arrays have sizes that are a multiple of a byte.
             First subtract the lower bound, if any, in the type of the
             index, then convert to sizetype and multiply by the size of the
             array element.  */
          if (low_bound != 0 && ! integer_zerop (low_bound))
            index = fold (build (MINUS_EXPR, TREE_TYPE (index),
                                 index, low_bound));

          /* If the index has a self-referential type, pass it to a
             WITH_RECORD_EXPR; if the component size is, pass our
             component to one.  */
          if (! TREE_CONSTANT (index)
              && contains_placeholder_p (index))
            index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
          if (! TREE_CONSTANT (unit_size)
              && contains_placeholder_p (unit_size))
            unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);

          offset = size_binop (PLUS_EXPR, offset,
                               size_binop (MULT_EXPR,
                                           convert (sizetype, index),
                                           unit_size));
        }

      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
        {
          tree new = find_placeholder (exp, &placeholder_ptr);

          /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
             We might have been called from tree optimization where we
             haven't set up an object yet.  */
          if (new == 0)
            break;
          else
            exp = new;

          continue;
        }

      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
               && TREE_CODE (exp) != VIEW_CONVERT_EXPR
               && ! ((TREE_CODE (exp) == NOP_EXPR
                      || TREE_CODE (exp) == CONVERT_EXPR)
                     && (TYPE_MODE (TREE_TYPE (exp))
                         == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
                                 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
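/* Worked example (illustrative): for a reference like "s.f" where F is a
   non-bit-field 4 bytes into S, get_inner_reference returns S with
   *PBITPOS == 32, *POFFSET == 0 and *PMODE the field's mode.  For a
   variable element such as "s.a[i]", the byte offset i * sizeof (elt)
   cannot be folded into *PBITPOS, so it comes back as a tree in *POFFSET
   instead.  */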
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
        subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
        target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
                    code == ZERO_EXTEND);
      return target;
    }

  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
        {
          code = PLUS;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_simple_binop (GET_MODE (value), code,
                                          XEXP (XEXP (value, 0), 0), op2,
                                          subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_simple_binop (GET_MODE (value), code, temp,
                                      force_operand (XEXP (XEXP (value,
                                                                 0), 1), 0),
                                      target, 0, OPTAB_LIB_WIDEN);
        }

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
        {
        case MULT:
          return expand_mult (GET_MODE (value), op1, op2, target, 1);
        case DIV:
          if (!INTEGRAL_MODE_P (GET_MODE (value)))
            return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                        target, 1, OPTAB_LIB_WIDEN);
          else
            return expand_divmod (0,
                                  FLOAT_MODE_P (GET_MODE (value))
                                  ? RDIV_EXPR : TRUNC_DIV_EXPR,
                                  GET_MODE (value), op1, op2, target, 0);
        case MOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 0);
        case UDIV:
          return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case UMOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case ASHIFTRT:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 0, OPTAB_LIB_WIDEN);
        default:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 1, OPTAB_LIB_WIDEN);
        }
    }

  if (GET_RTX_CLASS (code) == '1')
    {
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references
     to be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
      && (GET_MODE_SIZE (GET_MODE (value))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
                             force_reg (GET_MODE (SUBREG_REG (value)),
                                        force_operand (SUBREG_REG (value),
                                                       NULL_RTX)),
                             GET_MODE (SUBREG_REG (value)),
                             SUBREG_BYTE (value));
#endif

  return value;
}
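/* Usage sketch (illustrative): given VALUE == (plus:SI (reg:SI 100)
   (const_int 4)), force_operand falls into the binary-operation case,
   emits the addition via expand_simple_binop, and returns the pseudo
   holding the sum, so the caller can use the result wherever a general
   operand is required.  */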
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
                 != INTEGER_CST)
          && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (GET_CODE (x) == MEM
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */

  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
        TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
        return ((TREE_VALUE (exp) == 0
                 || safe_from_p (x, TREE_VALUE (exp), 0))
                && (TREE_CHAIN (exp) == 0
                    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
        return 1;       /* An already-visited SAVE_EXPR? */
      else
        return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
              && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          /* If the operand is static or we are static, we can't conflict.
             Likewise if we don't conflict with the operand at all.  */
          if (staticp (TREE_OPERAND (exp, 0))
              || TREE_STATIC (exp)
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
            return 1;

          /* Otherwise, the only way this can conflict is if we are taking
             the address of a DECL whose address is part of X, which is
             very rare.  */
          exp = TREE_OPERAND (exp, 0);
          if (DECL_P (exp))
            {
              if (!DECL_RTL_SET_P (exp)
                  || GET_CODE (DECL_RTL (exp)) != MEM)
                return 0;
              else
                exp_rtl = XEXP (DECL_RTL (exp), 0);
            }
          break;

        case INDIRECT_REF:
          if (GET_CODE (x) == MEM
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
                                        get_alias_set (exp)))
            return 0;
          break;

        case CALL_EXPR:
          /* Assume that the call will clobber all hard registers and
             all of memory.  */
          if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
              || GET_CODE (x) == MEM)
            return 0;
          break;

        case RTL_EXPR:
          /* If a sequence exists, we would have to scan every instruction
             in the sequence to see if it was safe.  This is probably not
             worthwhile.  */
          if (RTL_EXPR_SEQUENCE (exp))
            return 0;

          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
          break;

        case CLEANUP_POINT_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

        case SAVE_EXPR:
          exp_rtl = SAVE_EXPR_RTL (exp);
          if (exp_rtl)
            break;

          /* If we've already scanned this, don't do it again.  Otherwise,
             show we've scanned it and record for clearing the flag if we're
             going on.  */
          if (TREE_PRIVATE (exp))
            return 1;

          TREE_PRIVATE (exp) = 1;
          if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
            {
              TREE_PRIVATE (exp) = 0;
              return 0;
            }

          save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
          return 1;

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

        case METHOD_CALL_EXPR:
          /* This takes an rtx argument, but shouldn't appear here.  */
          abort ();

        default:
          break;
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
          return 0;

      /* If this is a language-specific tree code, it may require
         special handling.  */
      if ((unsigned int) TREE_CODE (exp)
          >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
          && !(*lang_hooks.safe_from_p) (x, exp))
        return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (GET_CODE (exp_rtl) == REG
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && true_dependence (exp_rtl, VOIDmode, x,
                                        rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
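/* Added commentary: this function is purely a diagnostic guard.  On a
   target that defines MAX_INTEGER_COMPUTATION_MODE it rejects, for
   example, a wider-than-supported integer operation (say TImode where
   the cap is DImode) with internal_error rather than silently emitting
   insns the machine cannot support.  */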
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR: case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
  HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
	       || TREE_CODE (elt) == COND_EXPR)
	      ? TREE_OPERAND (elt, 1)
	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
	      ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
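/* Illustrative usage sketch (added commentary, not part of the original
   interface documentation):

	rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   yields EXP's value in whatever mode and location is convenient; a
   caller that insists on a particular register must copy the result
   itself, since both TARGET and TMODE are only suggestions.  */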
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    p->expr->x_forced_labels
	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
				   p->expr->x_forced_labels);
	  }
	else
	  {
	    if (modifier == EXPAND_INITIALIZER)
	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						 label_rtx (exp),
						 forced_labels);
	  }

	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  rtx value = DECL_RTL_IF_SET (exp);

	  layout_decl (exp, 0);

	  /* If the RTL was already set, update its mode and memory
	     attributes.  */
	  if (value != 0)
	    {
	      PUT_MODE (value, DECL_MODE (exp));
	      SET_DECL_RTL (exp, 0);
	      set_mem_attributes (value, exp, 1);
	      SET_DECL_RTL (exp, value);
	    }
	}

      /* ... fall through ...  */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  (*lang_hooks.mark_addressable) (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}
      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = replace_equiv_address (DECL_RTL (exp),
				      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return replace_equiv_address (TREE_CST_RTL (exp),
				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	const char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return
	    replace_equiv_address (temp,
				   fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0)
	  = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
	return temp;
      }
    case PLACEHOLDER_EXPR:
      {
	tree old_list = placeholder_list;
	tree placeholder_expr = 0;

	exp = find_placeholder (exp, &placeholder_expr);
	if (exp == 0)
	  abort ();

	placeholder_list = TREE_CHAIN (placeholder_expr);
	temp = expand_expr (exp, original_target, tmode, modifier);
	placeholder_list = old_list;
	return temp;
      }
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
			    modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      expand_decl (vars);
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insn (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ((TREE_CODE (type) == VECTOR_TYPE
			     && !is_zeros_p (exp))
			    || ! mostly_zeros_p (exp)))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_WRITE)
	  return gen_int_mode (TREE_STRING_POINTER (string)
			       [TREE_INT_CST_LOW (index)], mode);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return gen_int_mode (TREE_STRING_POINTER (array)
			       [TREE_INT_CST_LOW (index)], mode);
	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;
	    unsigned HOST_WIDE_INT i;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }
	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
      /* Fall through.  */
    case COMPONENT_REF:
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */

	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

	    /* If this object is in a register, put it into memory.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		/* If the operand is a SAVE_EXPR, we can deal with this by
		   forcing the SAVE_EXPR into memory.  */
		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		  {
		    put_var_into_stack (TREE_OPERAND (exp, 0));
		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		  }
		else
		  {
		    tree nt
		      = build_qualified_type (TREE_TYPE (tem),
					      (TYPE_QUALS (TREE_TYPE (tem))
					       | TYPE_QUAL_CONST));
		    rtx memloc = assign_temp (nt, 1, 1, 1);

		    emit_move_insn (memloc, op0);
		    op0 = memloc;
		  }
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    /* A constant address in OP0 can have VOIDmode, we must not try
	       to call force_reg for that case.  Avoid that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one element arrays having the same mode as its element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
		&& ((TYPE_ALIGN (TREE_TYPE (tem))
		     < GET_MODE_ALIGNMENT (mode))
		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && GET_CODE (op0) == MEM
		      && GET_CODE (target) == MEM
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 BLOCK_OP_NORMAL);

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_temp (build_qualified_type
				       ((*lang_hooks.types.type_for_mode)
					(ext_mode, 0),
					TYPE_QUAL_CONST), 0, 1, 1);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case VTABLE_REF:
      {
	rtx insn, before = get_last_insn (), vtbl_ref;

	/* Evaluate the interior expression.  */
	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
				 tmode, modifier);

	/* Get or create an instruction off which to hang a note.  */
	if (REG_P (subtarget))
	  {
	    target = subtarget;
	    insn = get_last_insn ();
	    if (insn == before)
	      abort ();
	    if (! INSN_P (insn))
	      insn = prev_nonnote_insn (insn);
	  }
	else
	  {
	    target = gen_reg_rtx (GET_MODE (subtarget));
	    insn = emit_move_insn (target, subtarget);
	  }

	/* Collect the data for the note.  */
	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
	vtbl_ref = plus_constant (vtbl_ref,
				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
	/* Discard the initial CONST that was added.  */
	vtbl_ref = XEXP (vtbl_ref, 0);

	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

	return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low % bits_per_word);
	       the_word  = set [(index - rlo) / bits_per_word];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	     && TREE_CODE (set_low_bound) == INTEGER_CST
	     && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
				  CLEANUP_EH_ONLY (exp));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return (*lang_hooks.expand_expr)
	      (exp, original_target, tmode, modifier);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attributes.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    target = assign_temp (type, 0, 1, 1);

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, type, 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TREE_UNSIGNED (inner_type));
	}

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are within a word, we
	 can use gen_lowpart.  If neither is true, make sure the operand is
	 in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (GET_CODE (op0) == MEM)
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      if (TREE_ADDRESSABLE (exp))
		abort ();

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 BLOCK_OP_NORMAL);
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;
    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
, 1)) == INTEGER_CST
7921 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7922 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7926 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7927 (modifier
== EXPAND_INITIALIZER
7928 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7929 if (! CONSTANT_P (op0
))
7931 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7932 VOIDmode
, modifier
);
7933 /* Don't go to both_summands if modifier
7934 says it's not right to return a PLUS. */
7935 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7939 /* Use immed_double_const to ensure that the constant is
7940 truncated according to the mode of OP1, then sign extended
7941 to a HOST_WIDE_INT. Using the constant directly can result
7942 in non-canonical RTL in a 64x32 cross compile. */
7944 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7946 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7947 op0
= plus_constant (op0
, INTVAL (constant_part
));
7948 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7949 op0
= force_operand (op0
, target
);
7954 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}

      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS(mode) == MODE_INT)
		   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  goto both_summands;
	}

      goto binop2;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* If we knew for certain that this is arithmetic for an array
	     reference, and we knew the bounds of the array, then we could
	     apply the distributive law across (PLUS X C) for constant C.
	     Without such knowledge, we risk overflowing the computation
	     when both X and C are large, but X+C isn't.  */
	  /* ??? Could perhaps special-case EXP being unsigned and C being
	     positive.  In that case we are certain that X+C is no smaller
	     than X and so the transformed expression will overflow iff the
	     original would have.  */

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
	}
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
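/* Illustrative sketch (hypothetical example, not part of this file):
   the NOP_EXPR checks above recognize a multiply whose operands were
   widened from a narrower type, so that on a machine with, say, a
   16x16->32 multiply the source below is done with one widening
   multiply instead of a full-width one.  */
#if 0
static long
widening_mult_example (short a, short b)
{
  /* Both operands are extensions from the same narrower mode with the
     same signedness, so smul_widen_optab applies.  */
  return (long) a * (long) b;
}
#endif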
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the
	 original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;
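/* Illustrative sketch (hypothetical example, not part of this file):
   with -funsafe-math-optimizations, each division above is rebuilt as
   a multiplication by a reciprocal, so CSE may compute 1/c once.  */
#if 0
static double
rdiv_example (double a, double b, double c)
{
  /* Expanded as a*(1/c) + b*(1/c); results can differ in the last
     bits, hence the flag_unsafe_math_optimizations guard.  */
  return a / c + b / c;
}
#endif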
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS (mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
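/* Illustrative sketch (hypothetical example, not part of this file):
   when expand_binop finds no min/max instruction, the fallback above
   emits the moral equivalent of this compare-and-branch sequence.  */
#if 0
static int
max_fallback_example (int a, int b)
{
  int t = a;			/* emit_move_insn (target, op0) */
  if (! (t >= b))		/* do_compare_rtx_and_jump with GE */
    t = b;			/* emit_move_insn (target, op1) */
  return t;			/* label op0 falls through here */
}
#endif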
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CLZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, clz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CTZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ctz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case POPCOUNT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, popcount_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case PARITY_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, parity_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
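/* Illustrative sketch (hypothetical example, not part of this file):
   the distinction drawn in the comment above: TRUTH_AND_EXPR computes
   both operands as 0-or-1 values and ands the bits, while
   TRUTH_ANDIF_EXPR short-circuits.  Both yield the same value.  */
#if 0
static int
truth_and_example (int a, int b)
{
  int eager = (a != 0) & (b != 0);	/* TRUTH_AND_EXPR: branch-free */
  int lazy = (a != 0) && (b != 0);	/* TRUTH_ANDIF_EXPR: may branch */
  return eager == lazy;			/* always 1 */
}
#endif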
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
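/* Illustrative sketch (hypothetical example, not part of this file):
   the fast path above expands `foo != 0' without a store-flag insn:
   load foo, then overwrite it with 1 exactly when it is nonzero.  */
#if 0
static int
ne_zero_example (int foo)
{
  int t = foo;		/* expand_expr into original_target */
  if (t != 0)		/* emit_cmp_and_jump_insns vs. const0_rtx */
    t = 1;		/* emit_move_insn (temp, const1_rtx) */
  return t;
}
#endif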
      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
				target, tmode, modifier);
	}
      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }
	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && (GET_CODE (original_target) != MEM
		     || TREE_ADDRESSABLE (type)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    tree cond;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? addv_optab : add_optab)
			    : TREE_CODE (binary_op) == MINUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      cond = invert_truthvalue (TREE_OPERAND (exp, 0));
	    else
	      cond = TREE_OPERAND (exp, 0);

	    result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
					   ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	  }
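/* Illustrative sketch (hypothetical example, not part of this file):
   for X ? A + C : A with C a power of two, the store-flag path above
   computes the comparison as a 0/1 value and shifts it into place,
   avoiding a branch entirely.  Here with C == 4:  */
#if 0
static int
cond_add_example (int x, int y, int a)
{
  /* x < y ? a + 4 : a  is emitted as  a + ((x < y) << 2).  */
  return a + ((x < y) << 2);
}
#endif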
	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns. For
	       example A ? throw : E  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	return ignore ? const0_rtx : temp;
      }
    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which lays down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	if (target == 0)
	  {
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		SET_DECL_RTL (slot, target);
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2)
		    = (*lang_hooks.maybe_build_cleanup) (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		SET_DECL_RTL (slot, target);
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot);
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, 0);

	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

	return target;
      }
    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy and that prevents a partial-subsumption
	   which makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = 0;

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }
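/* Illustrative sketch (hypothetical example, not part of this file):
   the one-bit-field special case above turns `s->a |= s->b' (result
   ignored) into a test and a conditional store of the constant 1,
   instead of a read-modify-write of the destination field.  */
#if 0
struct two_bits { unsigned a : 1, b : 1; };

static void
bitfield_ior_example (struct two_bits *s)
{
  if (s->b)		/* do_jump on the source bitfield */
    s->a = 1;		/* expand_assignment of integer_one_node */
}
#endif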
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
    case ADDR_EXPR:
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      /* If we are taking the address of a constant and are at the
	 top level, we have to use output_constant_def since we can't
	 call force_const_mem at top level.  */
      else if (cfun == 0
	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
		       == 'c')))
	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	     clever and returns a REG when given a MEM.  */
	  op0 = protect_from_queue (op0, 1);

	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		   || GET_CODE (op0) == PARALLEL)
	    {
	      /* If the operand is a SAVE_EXPR, we can deal with this by
		 forcing the SAVE_EXPR into memory.  */
	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		{
		  put_var_into_stack (TREE_OPERAND (exp, 0));
		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		}
	      else
		{
		  /* If this object is in a register, it can't be BLKmode.  */
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  rtx memloc = assign_temp (inner_type, 1, 1, 1);

		  if (GET_CODE (op0) == PARALLEL)
		    /* Handle calls that pass values in multiple
		       non-contiguous locations.  The Irix 6 ABI has examples
		       of this.  */
		    emit_group_store (memloc, op0,
				      int_size_in_bytes (inner_type));
		  else
		    emit_move_insn (memloc, op0);

		  op0 = memloc;
		}
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  mark_temp_addr_taken (op0);
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
		  && mode == ptr_mode)
		op0 = convert_memory_address (ptr_mode, op0);
#endif
	      return op0;
	    }

	  /* If OP0 is not aligned at least as much as the type requires, we
	     need to make a temporary, copy OP0 to it, and take the address of
	     the temporary.  We want to use the alignment of the type, not of
	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	     the test for BLKmode means that can't happen.  The test for
	     BLKmode is because we never make mis-aligned MEMs with
	     non-BLKmode.

	     We don't need to do this at all if the machine doesn't have
	     strict alignment.  */
	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  > MEM_ALIGN (op0))
	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx new
		= assign_stack_temp_for_type
		  (TYPE_MODE (inner_type),
		   MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		   : int_size_in_bytes (inner_type),
		   1, build_qualified_type (inner_type,
					    (TYPE_QUALS (inner_type)
					     | TYPE_QUAL_CONST)));

	      if (TYPE_ALIGN_OK (inner_type))
		abort ();

	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
			       BLOCK_OP_NORMAL);
	      op0 = new;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr
	  && GET_CODE (op0) != REG
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode,
			    ! unsignedp && flag_trapv
			    && (GET_MODE_CLASS (partmode) == MODE_INT)
			    ? negv_optab : neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }
    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end_cleanup (handler);

	return op0;
      }

    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);

	if (!optimize || unsafe_for_reeval (finally_block) > 1)
	  {
	    /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
	       is not sufficient, so we cannot expand the block twice.
	       So we play games with GOTO_SUBROUTINE_EXPR to let us
	       expand the thing only once.  */
	    /* When not optimizing, we go ahead with this form since
	       (1) user breakpoints operate more predictably without
		   code duplication, and
	       (2) we're not running any of the global optimizers
		   that would explode in time/space with the highly
		   connected CFG created by the indirect branching.  */

	    rtx finally_label = gen_label_rtx ();
	    rtx done_label = gen_label_rtx ();
	    rtx return_link = gen_reg_rtx (Pmode);
	    tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
				  (tree) finally_label, (tree) return_link);
	    TREE_SIDE_EFFECTS (cleanup) = 1;

	    /* Start a new binding layer that will keep track of all cleanup
	       actions to be performed.  */
	    expand_start_bindings (2);
	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, cleanup);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	    emit_jump (done_label);
	    emit_label (finally_label);
	    expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	    emit_indirect_jump (return_link);
	    emit_label (done_label);
	  }
	else
	  {
	    expand_start_bindings (2);
	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, finally_block);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	  }

	return op0;
      }
:
9285 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9286 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9287 rtx return_address
= gen_label_rtx ();
9288 emit_move_insn (return_link
,
9289 gen_rtx_LABEL_REF (Pmode
, return_address
));
9291 emit_label (return_address
);
9296 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9299 return get_exception_pointer (cfun
);
9302 /* Function descriptors are not valid except for as
9303 initialization constants, and should not be expanded. */
9307 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (offset, exp)
     tree offset;
     tree exp;
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR
	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
	  && (TREE_OPERAND (offset, 0) == exp
	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
		      == TREE_TYPE (exp)))));
}
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
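/* Illustrative sketch (hypothetical caller, not part of this file):
   string_constant recognizes both a bare address of a STRING_CST and
   the ADDR_EXPR-plus-offset form produced for subscripted literals.  */
#if 0
static void
string_constant_example (tree arg)
{
  tree offset;
  tree str = string_constant (arg, &offset);

  if (str != 0)
    {
      /* For an ARG representing "abcdef" + 2, STR is the STRING_CST
	 "abcdef" and OFFSET is the sizetype constant 2.  */
    }
}
#endif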
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     tree exp;
     int post, ignore;
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }
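/* Illustrative sketch (hypothetical example, not part of this file):
   converting a decrement by a constant into a negative increment, as
   just done above, lets the rest of this function handle only
   add_optab when the step is a CONST_INT.  */
#if 0
static int
negative_increment_example (int x)
{
  x -= 4;	/* sub_optab with CONST_INT 4 ...	*/
  x += -4;	/* ... is treated as add_optab with -4.	*/
  return x;
}
#endif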
  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }
  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0				/* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
	{
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
	{
	  rtx addr = (general_operand (XEXP (op0, 0), mode)
		      ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;

	  op0 = replace_equiv_address (op0, addr);
	  temp = force_reg (GET_MODE (op0), op0);
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
	  enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif
    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
	goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
	 operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;
#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
				  TREE_OPERAND (exp, 0),
				  TREE_OPERAND (exp, 1)),
			   NE, NE, if_false_label, if_true_label);
      break;
    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;
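/* Illustrative sketch (hypothetical example, not part of this file):
   testing `x & 0x80' only involves the low byte, so the conversion
   above lets the comparison happen in QImode on machines with byte
   compares, which is exactly the sign-bit case the comment mentions.  */
#if 0
static int
narrow_and_example (int x)
{
  /* Equivalent to do_jump (convert (type, exp), ...) where TYPE is
     the 8-bit integer type: only one byte participates.  */
  return ((unsigned char) x & 0x80) != 0;
}
#endif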
    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;
    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
	HOST_WIDE_INT bitsize, bitpos;
	int unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &volatilep);

	type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }
    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();

	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

	  start_cleanup_deferral ();
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);

	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  end_cleanup_deferral ();
	}
      break;
    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		      fold (build (EQ_EXPR, TREE_TYPE (exp),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1)))),
		      fold (build (EQ_EXPR, TREE_TYPE (exp),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
	break;
      }
    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		      fold (build (NE_EXPR, TREE_TYPE (exp),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1)))),
		      fold (build (NE_EXPR, TREE_TYPE (exp),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
	break;
      }
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	enum rtx_code cmp, rcmp;
	int do_rev;

	if (code == UNORDERED_EXPR)
	  cmp = UNORDERED, rcmp = ORDERED;
	else
	  cmp = ORDERED, rcmp = UNORDERED;
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

	do_rev = 0;
	if (! can_compare_p (cmp, mode, ccp_jump)
	    && (can_compare_p (rcmp, mode, ccp_jump)
		/* If the target doesn't provide either UNORDERED or ORDERED
		   comparisons, canonicalize on UNORDERED for the library.  */
		|| rcmp == UNORDERED))
	  do_rev = 1;

	if (! do_rev)
	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;
    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
	rcode1 = UNLT;
	tcode2 = LT_EXPR;
	goto unordered_bcc;
      case UNLE_EXPR:
	rcode1 = UNLE;
	tcode2 = LE_EXPR;
	goto unordered_bcc;
      case UNGT_EXPR:
	rcode1 = UNGT;
	tcode2 = GT_EXPR;
	goto unordered_bcc;
      case UNGE_EXPR:
	rcode1 = UNGE;
	tcode2 = GE_EXPR;
	goto unordered_bcc;
      case UNEQ_EXPR:
	rcode1 = UNEQ;
	tcode2 = EQ_EXPR;
	goto unordered_bcc;

      unordered_bcc:
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	if (can_compare_p (rcode1, mode, ccp_jump))
	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
			       if_true_label);
	else
	  {
	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
	    tree cmp0, cmp1;

	    /* If the target doesn't support combined unordered
	       compares, decompose into UNORDERED + comparison.  */
	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	    do_jump (exp, if_false_label, if_true_label);
	  }
    }
    break;
    case CALL_EXPR:
      /* __builtin_expect (<test>, 0) and
	 __builtin_expect (<test>, 1)

	 We need to do this here, so that <test> is not converted to a SCC
	 operation on machines that use condition code registers and COMPARE
	 like the PowerPC, and then the jump is done based on whether the SCC
	 operation produced a 1 or 0.  */

      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  tree arglist = TREE_OPERAND (exp, 1);

	  if (TREE_CODE (fndecl) == FUNCTION_DECL
	      && DECL_BUILT_IN (fndecl)
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && arglist != NULL_TREE
	      && TREE_CHAIN (arglist) != NULL_TREE)
	    {
	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						    if_true_label);

	      if (seq != NULL_RTX)
		{
		  emit_insn (seq);
		  return;
		}
	    }
	}
      /* fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
	  || GET_CODE (temp) == LABEL_REF)
	{
	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				 GET_MODE (temp), NULL_RTX,
				 if_false_label, if_true_label);
      else
	abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
			      if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
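/* The bounds check and dispatch address computed above correspond to the
   plain C sketch below; `table' stands for the jump table at TABLE_LABEL
   and the entry size for GET_MODE_SIZE (CASE_VECTOR_MODE).  A sketch with
   hypothetical names.  */
#if 0
static void *
tablejump_example (void **table, unsigned long index, unsigned long range,
		   void *default_target)
{
  /* One unsigned compare checks both bounds, since index has already
     had the range's lower bound subtracted.  */
  if (index > range)
    return default_target;
  return table[index];		/* *(table + index * entry_size) */
}
#endif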
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
#include "gt-expr.h"