/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT, unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
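
/* As a rough illustration (the numbers are hypothetical and not tied to
   any particular target): copying 16 bytes with word alignment on a
   32-bit machine needs four SImode moves, so move_by_pieces_ninsns
   returns 4; with the default MOVE_RATIO of 15 above, MOVE_BY_PIECES_P
   is true and the copy is expanded inline rather than through a movstr
   pattern or a libcall.  */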
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
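
/* Note: move_by_pieces and move_by_pieces_ninsns below consult
   SLOW_UNALIGNED_ACCESS to decide whether they may act as if the
   operands were MOVE_MAX-aligned; when unaligned accesses are slow they
   stick to the alignment the operands actually provide.  */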
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
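
/* The QUEUED rtx built above records, in operand order: the variable
   being incremented (QUEUED_VAR), the insn that eventually performs the
   increment (QUEUED_INSN, filled in by emit_queue), a copy of the
   pre-increment value if one is ever needed (QUEUED_COPY), the increment
   body itself (QUEUED_BODY), and the link to the next pending entry
   (QUEUED_NEXT).  */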
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
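
/* Typical usage, summarizing the comments above: expansion code passes
   each operand through protect_from_queue immediately before emitting
   the insn that uses it, and calls emit_queue (below) at the points
   where the queued side effects must finally take place.  */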
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
609 if (to_real
!= from_real
)
616 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
618 /* Try converting directly if the insn is supported. */
619 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
622 emit_unop_insn (code
, to
, from
, UNKNOWN
);
627 #ifdef HAVE_trunchfqf2
628 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
630 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
634 #ifdef HAVE_trunctqfqf2
635 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
637 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
641 #ifdef HAVE_truncsfqf2
642 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
644 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
648 #ifdef HAVE_truncdfqf2
649 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
651 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
655 #ifdef HAVE_truncxfqf2
656 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
658 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
662 #ifdef HAVE_trunctfqf2
663 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
665 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
670 #ifdef HAVE_trunctqfhf2
671 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
673 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
677 #ifdef HAVE_truncsfhf2
678 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
680 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
684 #ifdef HAVE_truncdfhf2
685 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
687 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
691 #ifdef HAVE_truncxfhf2
692 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
694 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
698 #ifdef HAVE_trunctfhf2
699 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
701 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
706 #ifdef HAVE_truncsftqf2
707 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
709 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
713 #ifdef HAVE_truncdftqf2
714 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
716 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
720 #ifdef HAVE_truncxftqf2
721 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
723 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
727 #ifdef HAVE_trunctftqf2
728 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
730 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
738 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
745 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
752 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
759 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
766 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
778 libcall
= extendsfdf2_libfunc
;
782 libcall
= extendsfxf2_libfunc
;
786 libcall
= extendsftf2_libfunc
;
798 libcall
= truncdfsf2_libfunc
;
802 libcall
= extenddfxf2_libfunc
;
806 libcall
= extenddftf2_libfunc
;
818 libcall
= truncxfsf2_libfunc
;
822 libcall
= truncxfdf2_libfunc
;
834 libcall
= trunctfsf2_libfunc
;
838 libcall
= trunctfdf2_libfunc
;
850 if (libcall
== (rtx
) 0)
851 /* This conversion is not implemented yet. */
855 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
857 insns
= get_insns ();
859 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
864 /* Now both modes are integers. */
866 /* Handle expanding beyond a word. */
867 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
868 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
875 enum machine_mode lowpart_mode
;
876 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
878 /* Try converting directly if the insn is supported. */
879 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
882 /* If FROM is a SUBREG, put it into a register. Do this
883 so that we always generate the same set of insns for
884 better cse'ing; if an intermediate assignment occurred,
885 we won't be doing the operation directly on the SUBREG. */
886 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
887 from
= force_reg (from_mode
, from
);
888 emit_unop_insn (code
, to
, from
, equiv_code
);
891 /* Next, try converting via full word. */
892 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
893 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
894 != CODE_FOR_nothing
))
896 if (GET_CODE (to
) == REG
)
897 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
898 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
899 emit_unop_insn (code
, to
,
900 gen_lowpart (word_mode
, to
), equiv_code
);
904 /* No special multiword conversion insn; do it by hand. */
907 /* Since we will turn this into a no conflict block, we must ensure
908 that the source does not overlap the target. */
910 if (reg_overlap_mentioned_p (to
, from
))
911 from
= force_reg (from_mode
, from
);
913 /* Get a copy of FROM widened to a word, if necessary. */
914 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
915 lowpart_mode
= word_mode
;
917 lowpart_mode
= from_mode
;
919 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
921 lowpart
= gen_lowpart (lowpart_mode
, to
);
922 emit_move_insn (lowpart
, lowfrom
);
924 /* Compute the value to put in each remaining word. */
926 fill_value
= const0_rtx
;
931 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
932 && STORE_FLAG_VALUE
== -1)
934 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
936 fill_value
= gen_reg_rtx (word_mode
);
937 emit_insn (gen_slt (fill_value
));
943 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
944 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
946 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
950 /* Fill the remaining words. */
951 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
953 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
954 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
959 if (fill_value
!= subword
)
960 emit_move_insn (subword
, fill_value
);
963 insns
= get_insns ();
966 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
967 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
971 /* Truncating multi-word to a word or less. */
972 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
973 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
975 if (!((GET_CODE (from
) == MEM
976 && ! MEM_VOLATILE_P (from
)
977 && direct_load
[(int) to_mode
]
978 && ! mode_dependent_address_p (XEXP (from
, 0)))
979 || GET_CODE (from
) == REG
980 || GET_CODE (from
) == SUBREG
))
981 from
= force_reg (from_mode
, from
);
982 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
986 /* Handle pointer conversion. */ /* SPEE 900220. */
987 if (to_mode
== PQImode
)
989 if (from_mode
!= QImode
)
990 from
= convert_to_mode (QImode
, from
, unsignedp
);
992 #ifdef HAVE_truncqipqi2
993 if (HAVE_truncqipqi2
)
995 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
998 #endif /* HAVE_truncqipqi2 */
1002 if (from_mode
== PQImode
)
1004 if (to_mode
!= QImode
)
1006 from
= convert_to_mode (QImode
, from
, unsignedp
);
1011 #ifdef HAVE_extendpqiqi2
1012 if (HAVE_extendpqiqi2
)
1014 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
1017 #endif /* HAVE_extendpqiqi2 */
1022 if (to_mode
== PSImode
)
1024 if (from_mode
!= SImode
)
1025 from
= convert_to_mode (SImode
, from
, unsignedp
);
1027 #ifdef HAVE_truncsipsi2
1028 if (HAVE_truncsipsi2
)
1030 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
1033 #endif /* HAVE_truncsipsi2 */
1037 if (from_mode
== PSImode
)
1039 if (to_mode
!= SImode
)
1041 from
= convert_to_mode (SImode
, from
, unsignedp
);
1046 #ifdef HAVE_extendpsisi2
1047 if (! unsignedp
&& HAVE_extendpsisi2
)
1049 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
1052 #endif /* HAVE_extendpsisi2 */
1053 #ifdef HAVE_zero_extendpsisi2
1054 if (unsignedp
&& HAVE_zero_extendpsisi2
)
1056 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
1059 #endif /* HAVE_zero_extendpsisi2 */
1064 if (to_mode
== PDImode
)
1066 if (from_mode
!= DImode
)
1067 from
= convert_to_mode (DImode
, from
, unsignedp
);
1069 #ifdef HAVE_truncdipdi2
1070 if (HAVE_truncdipdi2
)
1072 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1075 #endif /* HAVE_truncdipdi2 */
1079 if (from_mode
== PDImode
)
1081 if (to_mode
!= DImode
)
1083 from
= convert_to_mode (DImode
, from
, unsignedp
);
1088 #ifdef HAVE_extendpdidi2
1089 if (HAVE_extendpdidi2
)
1091 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1094 #endif /* HAVE_extendpdidi2 */
1099 /* Now follow all the conversions between integers
1100 no more than a word long. */
1102 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1103 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1105 GET_MODE_BITSIZE (from_mode
)))
1107 if (!((GET_CODE (from
) == MEM
1108 && ! MEM_VOLATILE_P (from
)
1109 && direct_load
[(int) to_mode
]
1110 && ! mode_dependent_address_p (XEXP (from
, 0)))
1111 || GET_CODE (from
) == REG
1112 || GET_CODE (from
) == SUBREG
))
1113 from
= force_reg (from_mode
, from
);
1114 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1115 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1116 from
= copy_to_reg (from
);
1117 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1121 /* Handle extension. */
1122 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1124 /* Convert directly if that works. */
1125 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1126 != CODE_FOR_nothing
)
1129 from
= force_not_mem (from
);
1131 emit_unop_insn (code
, to
, from
, equiv_code
);
1136 enum machine_mode intermediate
;
1140 /* Search for a mode to convert via. */
1141 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1142 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1143 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1144 != CODE_FOR_nothing
)
1145 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1147 GET_MODE_BITSIZE (intermediate
))))
1148 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1149 != CODE_FOR_nothing
))
1151 convert_move (to
, convert_to_mode (intermediate
, from
,
1152 unsignedp
), unsignedp
);
1156 /* No suitable intermediate mode.
1157 Generate what we need with shifts. */
1158 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1159 - GET_MODE_BITSIZE (from_mode
), 0);
1160 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1161 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1163 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1166 emit_move_insn (to
, tmp
);
1171 /* Support special truncate insns for certain modes. */
1173 if (from_mode
== DImode
&& to_mode
== SImode
)
1175 #ifdef HAVE_truncdisi2
1176 if (HAVE_truncdisi2
)
1178 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1182 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1186 if (from_mode
== DImode
&& to_mode
== HImode
)
1188 #ifdef HAVE_truncdihi2
1189 if (HAVE_truncdihi2
)
1191 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1195 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1199 if (from_mode
== DImode
&& to_mode
== QImode
)
1201 #ifdef HAVE_truncdiqi2
1202 if (HAVE_truncdiqi2
)
1204 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1208 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1212 if (from_mode
== SImode
&& to_mode
== HImode
)
1214 #ifdef HAVE_truncsihi2
1215 if (HAVE_truncsihi2
)
1217 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1221 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1225 if (from_mode
== SImode
&& to_mode
== QImode
)
1227 #ifdef HAVE_truncsiqi2
1228 if (HAVE_truncsiqi2
)
1230 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1234 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1238 if (from_mode
== HImode
&& to_mode
== QImode
)
1240 #ifdef HAVE_trunchiqi2
1241 if (HAVE_trunchiqi2
)
1243 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1247 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1251 if (from_mode
== TImode
&& to_mode
== DImode
)
1253 #ifdef HAVE_trunctidi2
1254 if (HAVE_trunctidi2
)
1256 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1260 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1264 if (from_mode
== TImode
&& to_mode
== SImode
)
1266 #ifdef HAVE_trunctisi2
1267 if (HAVE_trunctisi2
)
1269 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1273 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1277 if (from_mode
== TImode
&& to_mode
== HImode
)
1279 #ifdef HAVE_trunctihi2
1280 if (HAVE_trunctihi2
)
1282 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1286 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1290 if (from_mode
== TImode
&& to_mode
== QImode
)
1292 #ifdef HAVE_trunctiqi2
1293 if (HAVE_trunctiqi2
)
1295 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1299 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1303 /* Handle truncation of volatile memrefs, and so on;
1304 the things that couldn't be truncated directly,
1305 and for which there was no special instruction. */
1306 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1308 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1309 emit_move_insn (to
, temp
);
1313 /* Mode combination is not recognized. */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
1328 convert_to_mode (mode
, x
, unsignedp
)
1329 enum machine_mode mode
;
1333 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
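
/* For instance (a sketch, not taken from the original sources):
   convert_modes (QImode, SImode, x, 1) yields a QImode view of the
   SImode value X, reusing X in place via gen_lowpart when the
   truncation is a no-op and otherwise emitting a conversion into a
   fresh pseudo, per the rules described above.  */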
1350 convert_modes (mode
, oldmode
, x
, unsignedp
)
1351 enum machine_mode mode
, oldmode
;
1357 /* If FROM is a SUBREG that indicates that we have already done at least
1358 the required extension, strip it. */
1360 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1361 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1362 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1363 x
= gen_lowpart (mode
, x
);
1365 if (GET_MODE (x
) != VOIDmode
)
1366 oldmode
= GET_MODE (x
);
1368 if (mode
== oldmode
)
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
1377 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1378 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1379 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1381 HOST_WIDE_INT val
= INTVAL (x
);
1383 if (oldmode
!= VOIDmode
1384 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1386 int width
= GET_MODE_BITSIZE (oldmode
);
1388 /* We need to zero extend VAL. */
1389 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1392 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
1400 if ((GET_CODE (x
) == CONST_INT
1401 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1402 || (GET_MODE_CLASS (mode
) == MODE_INT
1403 && GET_MODE_CLASS (oldmode
) == MODE_INT
1404 && (GET_CODE (x
) == CONST_DOUBLE
1405 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1406 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1407 && direct_load
[(int) mode
])
1408 || (GET_CODE (x
) == REG
1409 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1410 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
1415 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1416 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1418 HOST_WIDE_INT val
= INTVAL (x
);
1419 int width
= GET_MODE_BITSIZE (oldmode
);
1421 /* We must sign or zero-extend in this case. Start by
1422 zero-extending, then sign extend if we need to. */
1423 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1425 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1426 val
|= (HOST_WIDE_INT
) (-1) << width
;
1428 return gen_int_mode (val
, mode
);
1431 return gen_lowpart (mode
, x
);
1434 temp
= gen_reg_rtx (mode
);
1435 convert_move (temp
, x
, unsignedp
);
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
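
/* Illustration with assumed values: if MOVE_MAX_PIECES is 8 and
   HOST_WIDE_INT is 64 bits, STORE_MAX_PIECES is MIN (8, 16) = 8, so
   store_by_pieces emits stores of at most 8 bytes at a time.  */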
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
1467 move_by_pieces (to
, from
, len
, align
)
1469 unsigned HOST_WIDE_INT len
;
1472 struct move_by_pieces data
;
1473 rtx to_addr
, from_addr
= XEXP (from
, 0);
1474 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1475 enum machine_mode mode
= VOIDmode
, tmode
;
1476 enum insn_code icode
;
1479 data
.from_addr
= from_addr
;
1482 to_addr
= XEXP (to
, 0);
1485 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1486 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1488 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1495 #ifdef STACK_GROWS_DOWNWARD
1501 data
.to_addr
= to_addr
;
1504 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1505 || GET_CODE (from_addr
) == POST_INC
1506 || GET_CODE (from_addr
) == POST_DEC
);
1508 data
.explicit_inc_from
= 0;
1509 data
.explicit_inc_to
= 0;
1510 if (data
.reverse
) data
.offset
= len
;
1513 /* If copying requires more than two move insns,
1514 copy addresses to registers (to make displacements shorter)
1515 and use post-increment if available. */
1516 if (!(data
.autinc_from
&& data
.autinc_to
)
1517 && move_by_pieces_ninsns (len
, align
) > 2)
1519 /* Find the mode of the largest move... */
1520 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1521 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1522 if (GET_MODE_SIZE (tmode
) < max_size
)
1525 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1527 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1528 data
.autinc_from
= 1;
1529 data
.explicit_inc_from
= -1;
1531 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1533 data
.from_addr
= copy_addr_to_reg (from_addr
);
1534 data
.autinc_from
= 1;
1535 data
.explicit_inc_from
= 1;
1537 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1538 data
.from_addr
= copy_addr_to_reg (from_addr
);
1539 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1541 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1543 data
.explicit_inc_to
= -1;
1545 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1547 data
.to_addr
= copy_addr_to_reg (to_addr
);
1549 data
.explicit_inc_to
= 1;
1551 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1552 data
.to_addr
= copy_addr_to_reg (to_addr
);
1555 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1556 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1557 align
= MOVE_MAX
* BITS_PER_UNIT
;
1559 /* First move what we can in the largest integer mode, then go to
1560 successively smaller modes. */
1562 while (max_size
> 1)
1564 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1565 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1566 if (GET_MODE_SIZE (tmode
) < max_size
)
1569 if (mode
== VOIDmode
)
1572 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1573 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1574 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1576 max_size
= GET_MODE_SIZE (mode
);
1579 /* The code above should have handled everything. */
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
1587 static unsigned HOST_WIDE_INT
1588 move_by_pieces_ninsns (l
, align
)
1589 unsigned HOST_WIDE_INT l
;
1592 unsigned HOST_WIDE_INT n_insns
= 0;
1593 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1595 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1596 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1597 align
= MOVE_MAX
* BITS_PER_UNIT
;
1599 while (max_size
> 1)
1601 enum machine_mode mode
= VOIDmode
, tmode
;
1602 enum insn_code icode
;
1604 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1605 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1606 if (GET_MODE_SIZE (tmode
) < max_size
)
1609 if (mode
== VOIDmode
)
1612 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1613 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1614 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1616 max_size
= GET_MODE_SIZE (mode
);
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
1629 move_by_pieces_1 (genfun
, mode
, data
)
1630 rtx (*genfun
) PARAMS ((rtx
, ...));
1631 enum machine_mode mode
;
1632 struct move_by_pieces
*data
;
1634 unsigned int size
= GET_MODE_SIZE (mode
);
1635 rtx to1
= NULL_RTX
, from1
;
1637 while (data
->len
>= size
)
1640 data
->offset
-= size
;
1644 if (data
->autinc_to
)
1645 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1648 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1651 if (data
->autinc_from
)
1652 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1655 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1657 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1658 emit_insn (gen_add2_insn (data
->to_addr
,
1659 GEN_INT (-(HOST_WIDE_INT
)size
)));
1660 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1661 emit_insn (gen_add2_insn (data
->from_addr
,
1662 GEN_INT (-(HOST_WIDE_INT
)size
)));
1665 emit_insn ((*genfun
) (to1
, from1
));
1668 #ifdef PUSH_ROUNDING
1669 emit_single_push_insn (mode
, from1
, NULL
);
1675 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1676 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1677 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1678 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1680 if (! data
->reverse
)
1681 data
->offset
+= size
;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
1700 emit_block_move (x
, y
, size
, method
)
1702 enum block_op_methods method
;
1710 case BLOCK_OP_NORMAL
:
1711 may_use_call
= true;
1714 case BLOCK_OP_CALL_PARM
:
1715 may_use_call
= block_move_libcall_safe_for_call_parm ();
1717 /* Make inhibit_defer_pop nonzero around the library call
1718 to force it to pop the arguments right away. */
1722 case BLOCK_OP_NO_LIBCALL
:
1723 may_use_call
= false;
1730 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1732 if (GET_MODE (x
) != BLKmode
)
1734 if (GET_MODE (y
) != BLKmode
)
1737 x
= protect_from_queue (x
, 1);
1738 y
= protect_from_queue (y
, 0);
1739 size
= protect_from_queue (size
, 0);
1741 if (GET_CODE (x
) != MEM
)
1743 if (GET_CODE (y
) != MEM
)
1748 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1749 can be incorrect is coming from __builtin_memcpy. */
1750 if (GET_CODE (size
) == CONST_INT
)
1752 x
= shallow_copy_rtx (x
);
1753 y
= shallow_copy_rtx (y
);
1754 set_mem_size (x
, size
);
1755 set_mem_size (y
, size
);
  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);
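
  /* To recap the strategy above: a small constant-size copy is expanded
     inline by move_by_pieces, then the target's movstr pattern is tried,
     then the memcpy/bcopy libcall when calls are allowed, and finally a
     byte-at-a-time loop when libcalls are forbidden.  */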
1767 if (method
== BLOCK_OP_CALL_PARM
)
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */
1778 block_move_libcall_safe_for_call_parm ()
1784 /* Check to see whether memcpy takes all register arguments. */
1786 takes_regs_uninit
, takes_regs_no
, takes_regs_yes
1787 } takes_regs
= takes_regs_uninit
;
1791 case takes_regs_uninit
:
1793 CUMULATIVE_ARGS args_so_far
;
1796 fn
= emit_block_move_libcall_fn (false);
1797 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0);
1799 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1800 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1802 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1803 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1804 if (!tmp
|| !REG_P (tmp
))
1805 goto fail_takes_regs
;
1806 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1807 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1809 goto fail_takes_regs
;
1811 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1814 takes_regs
= takes_regs_yes
;
1817 case takes_regs_yes
:
1821 takes_regs
= takes_regs_no
;
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */
1836 emit_block_move_via_movstr (x
, y
, size
, align
)
1840 /* Try the most limited insn first, because there's no point
1841 including more than one in the machine description unless
1842 the more limited one has some advantage. */
1844 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1845 enum machine_mode mode
;
1847 /* Since this is a move insn, we don't care about volatility. */
1850 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1851 mode
= GET_MODE_WIDER_MODE (mode
))
1853 enum insn_code code
= movstr_optab
[(int) mode
];
1854 insn_operand_predicate_fn pred
;
1856 if (code
!= CODE_FOR_nothing
1857 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1858 here because if SIZE is less than the mode mask, as it is
1859 returned by the macro, it will definitely be less than the
1860 actual mode mask. */
1861 && ((GET_CODE (size
) == CONST_INT
1862 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1863 <= (GET_MODE_MASK (mode
) >> 1)))
1864 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1865 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1866 || (*pred
) (x
, BLKmode
))
1867 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1868 || (*pred
) (y
, BLKmode
))
1869 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1870 || (*pred
) (opalign
, VOIDmode
)))
1873 rtx last
= get_last_insn ();
1876 op2
= convert_to_mode (mode
, size
, 1);
1877 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1878 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1879 op2
= copy_to_mode_reg (mode
, op2
);
1881 /* ??? When called via emit_block_move_for_call, it'd be
1882 nice if there were some way to inform the backend, so
1883 that it doesn't fail the expansion because it thinks
1884 emitting the libcall would be more efficient. */
1886 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1894 delete_insns_since (last
);
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */
1906 emit_block_move_via_libcall (dst
, src
, size
)
1909 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1910 enum machine_mode size_mode
;
  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy X, Y &
     SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1934 dst
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1935 src
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1937 if (TARGET_MEM_FUNCTIONS
)
1938 size_mode
= TYPE_MODE (sizetype
);
1940 size_mode
= TYPE_MODE (unsigned_type_node
);
1941 size
= convert_to_mode (size_mode
, size
, 1);
1942 size
= copy_to_mode_reg (size_mode
, size
);
  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */
1952 dst_tree
= make_tree (ptr_type_node
, dst
);
1953 src_tree
= make_tree (ptr_type_node
, src
);
1954 if (TARGET_MEM_FUNCTIONS
)
1955 size_tree
= make_tree (sizetype
, size
);
1957 size_tree
= make_tree (unsigned_type_node
, size
);
1959 fn
= emit_block_move_libcall_fn (true);
1960 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1961 if (TARGET_MEM_FUNCTIONS
)
1963 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1964 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1968 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1969 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1972 /* Now we have to build up the CALL_EXPR itself. */
1973 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1974 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1975 call_expr
, arg_list
, NULL_TREE
);
1976 TREE_SIDE_EFFECTS (call_expr
) = 1;
1978 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1980 /* If we are initializing a readonly value, show the above call
1981 clobbered it. Otherwise, a load from it may erroneously be
1982 hoisted from a loop. */
1983 if (RTX_UNCHANGING_P (dst
))
1984 emit_insn (gen_rtx_CLOBBER (VOIDmode
, dst
));
1986 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */
1993 static GTY(()) tree block_move_fn
;
1996 emit_block_move_libcall_fn (for_call
)
1999 static bool emitted_extern
;
2000 tree fn
= block_move_fn
, args
;
2004 if (TARGET_MEM_FUNCTIONS
)
2006 fn
= get_identifier ("memcpy");
2007 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2008 const_ptr_type_node
, sizetype
,
2013 fn
= get_identifier ("bcopy");
2014 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
2015 ptr_type_node
, unsigned_type_node
,
2019 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2020 DECL_EXTERNAL (fn
) = 1;
2021 TREE_PUBLIC (fn
) = 1;
2022 DECL_ARTIFICIAL (fn
) = 1;
2023 TREE_NOTHROW (fn
) = 1;
2028 if (for_call
&& !emitted_extern
)
2030 emitted_extern
= true;
2031 make_decl_rtl (fn
, NULL
);
2032 assemble_external (fn
);
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */
2043 emit_block_move_via_loop (x
, y
, size
, align
)
2045 unsigned int align ATTRIBUTE_UNUSED
;
2047 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
2048 enum machine_mode iter_mode
;
2050 iter_mode
= GET_MODE (size
);
2051 if (iter_mode
== VOIDmode
)
2052 iter_mode
= word_mode
;
2054 top_label
= gen_label_rtx ();
2055 cmp_label
= gen_label_rtx ();
2056 iter
= gen_reg_rtx (iter_mode
);
2058 emit_move_insn (iter
, const0_rtx
);
2060 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
2061 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
2062 do_pending_stack_adjust ();
2064 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
2066 emit_jump (cmp_label
);
2067 emit_label (top_label
);
2069 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
2070 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
2071 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
2072 x
= change_address (x
, QImode
, x_addr
);
2073 y
= change_address (y
, QImode
, y_addr
);
2075 emit_move_insn (x
, y
);
2077 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
2078 true, OPTAB_LIB_WIDEN
);
2080 emit_move_insn (iter
, tmp
);
2082 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
2083 emit_label (cmp_label
);
2085 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
2088 emit_note (NULL
, NOTE_INSN_LOOP_END
);
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
2095 move_block_to_reg (regno
, x
, nregs
, mode
)
2099 enum machine_mode mode
;
2102 #ifdef HAVE_load_multiple
2110 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
2111 x
= validize_mem (force_const_mem (mode
, x
));
2113 /* See if the machine can do this with a load multiple insn. */
2114 #ifdef HAVE_load_multiple
2115 if (HAVE_load_multiple
)
2117 last
= get_last_insn ();
2118 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
2126 delete_insns_since (last
);
2130 for (i
= 0; i
< nregs
; i
++)
2131 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
2132 operand_subword_force (x
, i
, mode
));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */
2140 move_block_from_reg (regno
, x
, nregs
, size
)
2147 #ifdef HAVE_store_multiple
2151 enum machine_mode mode
;
2156 /* If SIZE is that of a mode no bigger than a word, just use that
2157 mode's store operation. */
2158 if (size
<= UNITS_PER_WORD
2159 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
2161 emit_move_insn (adjust_address (x
, mode
, 0), gen_rtx_REG (mode
, regno
));
2165 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2166 to the left before storing to memory. Note that the previous test
2167 doesn't handle all cases (e.g. SIZE == 3). */
2168 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
2170 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
2176 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
2177 gen_rtx_REG (word_mode
, regno
),
2178 build_int_2 ((UNITS_PER_WORD
- size
)
2179 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
2180 emit_move_insn (tem
, shift
);
2184 /* See if the machine can do this with a store multiple insn. */
2185 #ifdef HAVE_store_multiple
2186 if (HAVE_store_multiple
)
2188 last
= get_last_insn ();
2189 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
2197 delete_insns_since (last
);
2201 for (i
= 0; i
< nregs
; i
++)
2203 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
2208 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
2219 gen_group_rtx (orig
)
2225 if (GET_CODE (orig
) != PARALLEL
)
2228 length
= XVECLEN (orig
, 0);
2229 tmps
= (rtx
*) alloca (sizeof (rtx
) * length
);
2231 /* Skip a NULL entry in first slot. */
2232 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
2237 for (; i
< length
; i
++)
2239 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
2240 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
2242 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
2245 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
2259 emit_group_load (dst
, orig_src
, ssize
)
2266 if (GET_CODE (dst
) != PARALLEL
)
2269 /* Check for a NULL entry, used to indicate that the parameter goes
2270 both on the stack and in registers. */
2271 if (XEXP (XVECEXP (dst
, 0, 0), 0))
2276 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
2278 /* Process the pieces. */
2279 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2281 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
2282 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
2283 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2286 /* Handle trailing fragments that run over the size of the struct. */
2287 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2289 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2290 bytelen
= ssize
- bytepos
;
2295 /* If we won't be loading directly from memory, protect the real source
2296 from strange tricks we might play; but make sure that the source can
2297 be loaded directly into the destination. */
2299 if (GET_CODE (orig_src
) != MEM
2300 && (!CONSTANT_P (orig_src
)
2301 || (GET_MODE (orig_src
) != mode
2302 && GET_MODE (orig_src
) != VOIDmode
)))
2304 if (GET_MODE (orig_src
) == VOIDmode
)
2305 src
= gen_reg_rtx (mode
);
2307 src
= gen_reg_rtx (GET_MODE (orig_src
));
2309 emit_move_insn (src
, orig_src
);
2312 /* Optimize the access just a bit. */
2313 if (GET_CODE (src
) == MEM
2314 && MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
)
2315 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2316 && bytelen
== GET_MODE_SIZE (mode
))
2318 tmps
[i
] = gen_reg_rtx (mode
);
2319 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2321 else if (GET_CODE (src
) == CONCAT
)
2323 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
2324 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
2326 if ((bytepos
== 0 && bytelen
== slen0
)
2327 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
2329 /* The following assumes that the concatenated objects all
2330 have the same size. In this case, a simple calculation
2331 can be used to determine the object and the bit field
2333 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
2334 if (! CONSTANT_P (tmps
[i
])
2335 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
2336 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
2337 (bytepos
% slen0
) * BITS_PER_UNIT
,
2338 1, NULL_RTX
, mode
, mode
, ssize
);
2340 else if (bytepos
== 0)
2342 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
2343 emit_move_insn (mem
, src
);
2344 tmps
[i
] = adjust_address (mem
, mode
, 0);
2349 else if (CONSTANT_P (src
)
2350 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
2353 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2354 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2357 if (BYTES_BIG_ENDIAN
&& shift
)
2358 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2359 tmps
[i
], 0, OPTAB_WIDEN
);
2364 /* Copy the extracted pieces into the proper (probable) hard regs. */
2365 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2366 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
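
/* Illustrative note (an assumed example, not taken from the original
   sources): a group such as

       (parallel [(expr_list (reg:SI 3) (const_int 0))
                  (expr_list (reg:SI 4) (const_int 4))])

   describes an eight-byte block whose first four bytes live in register 3
   and whose next four bytes live in register 4; emit_group_load fills each
   such register from the corresponding byte offset of SRC.  */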
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (dst, src)
     rtx dst, src;
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
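
/* Worked example (illustrative): with 32-bit words (BITS_PER_WORD == 32,
   UNITS_PER_WORD == 4) and a 2-byte structure, bytes % UNITS_PER_WORD == 2,
   so big_endian_correction == 32 - 2 * 8 == 16; on a big-endian target the
   extraction therefore skips the 16 empty high-order bits of the source
   word, matching the right-justified layout described above.  */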
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */
2647 can_store_by_pieces (len
, constfun
, constfundata
, align
)
2648 unsigned HOST_WIDE_INT len
;
2649 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
2653 unsigned HOST_WIDE_INT max_size
, l
;
2654 HOST_WIDE_INT offset
= 0;
2655 enum machine_mode mode
, tmode
;
2656 enum insn_code icode
;
2660 if (! STORE_BY_PIECES_P (len
, align
))
2663 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2664 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2665 align
= MOVE_MAX
* BITS_PER_UNIT
;
2667 /* We would first store what we can in the largest integer mode, then go to
2668 successively smaller modes. */
2671 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2676 max_size
= STORE_MAX_PIECES
+ 1;
2677 while (max_size
> 1)
2679 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2680 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2681 if (GET_MODE_SIZE (tmode
) < max_size
)
2684 if (mode
== VOIDmode
)
2687 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2688 if (icode
!= CODE_FOR_nothing
2689 && align
>= GET_MODE_ALIGNMENT (mode
))
2691 unsigned int size
= GET_MODE_SIZE (mode
);
2698 cst
= (*constfun
) (constfundata
, offset
, mode
);
2699 if (!LEGITIMATE_CONSTANT_P (cst
))
2709 max_size
= GET_MODE_SIZE (mode
);
2712 /* The code above should have handled everything. */
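
/* Note on the alignment handling in can_store_by_pieces (illustrative):
   when unaligned accesses are cheap, or ALIGN already exceeds
   MOVE_MAX * BITS_PER_UNIT or reaches BIGGEST_ALIGNMENT, ALIGN is
   normalized to MOVE_MAX * BITS_PER_UNIT, so the widest integer modes are
   considered regardless of the declared alignment of the destination.  */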
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */
2726 store_by_pieces (to
, len
, constfun
, constfundata
, align
)
2728 unsigned HOST_WIDE_INT len
;
2729 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
2733 struct store_by_pieces data
;
2735 if (! STORE_BY_PIECES_P (len
, align
))
2737 to
= protect_from_queue (to
, 1);
2738 data
.constfun
= constfun
;
2739 data
.constfundata
= constfundata
;
2742 store_by_pieces_1 (&data
, align
);
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */
2750 clear_by_pieces (to
, len
, align
)
2752 unsigned HOST_WIDE_INT len
;
2755 struct store_by_pieces data
;
2757 data
.constfun
= clear_by_pieces_1
;
2758 data
.constfundata
= NULL
;
2761 store_by_pieces_1 (&data
, align
);
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */
2768 clear_by_pieces_1 (data
, offset
, mode
)
2769 PTR data ATTRIBUTE_UNUSED
;
2770 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
;
2771 enum machine_mode mode ATTRIBUTE_UNUSED
;
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */
2782 store_by_pieces_1 (data
, align
)
2783 struct store_by_pieces
*data
;
2786 rtx to_addr
= XEXP (data
->to
, 0);
2787 unsigned HOST_WIDE_INT max_size
= STORE_MAX_PIECES
+ 1;
2788 enum machine_mode mode
= VOIDmode
, tmode
;
2789 enum insn_code icode
;
2792 data
->to_addr
= to_addr
;
2794 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2795 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2797 data
->explicit_inc_to
= 0;
2799 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2801 data
->offset
= data
->len
;
2803 /* If storing requires more than two move insns,
2804 copy addresses to registers (to make displacements shorter)
2805 and use post-increment if available. */
2806 if (!data
->autinc_to
2807 && move_by_pieces_ninsns (data
->len
, align
) > 2)
2809 /* Determine the main mode we'll be using. */
2810 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2811 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2812 if (GET_MODE_SIZE (tmode
) < max_size
)
2815 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2817 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2818 data
->autinc_to
= 1;
2819 data
->explicit_inc_to
= -1;
2822 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2823 && ! data
->autinc_to
)
2825 data
->to_addr
= copy_addr_to_reg (to_addr
);
2826 data
->autinc_to
= 1;
2827 data
->explicit_inc_to
= 1;
2830 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2831 data
->to_addr
= copy_addr_to_reg (to_addr
);
2834 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2835 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2836 align
= MOVE_MAX
* BITS_PER_UNIT
;
2838 /* First store what we can in the largest integer mode, then go to
2839 successively smaller modes. */
2841 while (max_size
> 1)
2843 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2844 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2845 if (GET_MODE_SIZE (tmode
) < max_size
)
2848 if (mode
== VOIDmode
)
2851 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2852 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2853 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2855 max_size
= GET_MODE_SIZE (mode
);
2858 /* The code above should have handled everything. */
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
2868 store_by_pieces_2 (genfun
, mode
, data
)
2869 rtx (*genfun
) PARAMS ((rtx
, ...));
2870 enum machine_mode mode
;
2871 struct store_by_pieces
*data
;
2873 unsigned int size
= GET_MODE_SIZE (mode
);
2876 while (data
->len
>= size
)
2879 data
->offset
-= size
;
2881 if (data
->autinc_to
)
2882 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
2885 to1
= adjust_address (data
->to
, mode
, data
->offset
);
2887 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2888 emit_insn (gen_add2_insn (data
->to_addr
,
2889 GEN_INT (-(HOST_WIDE_INT
) size
)));
2891 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
);
2892 emit_insn ((*genfun
) (to1
, cst
));
2894 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2895 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2897 if (! data
->reverse
)
2898 data
->offset
+= size
;
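
/* Illustrative decomposition (assuming a 32-bit target where MOVE_MAX and
   STORE_MAX_PIECES are 4): an 11-byte store is emitted as two SImode
   pieces, one HImode piece and one QImode piece; store_by_pieces_1 picks
   the widest mode that still fits and store_by_pieces_2 emits as many
   moves of that mode as the remaining length allows before falling back
   to the next narrower mode.  */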
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */
2908 clear_storage (object
, size
)
2913 unsigned int align
= (GET_CODE (object
) == MEM
? MEM_ALIGN (object
)
2914 : GET_MODE_ALIGNMENT (GET_MODE (object
)));
2916 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2917 just move a zero. Otherwise, do this a piece at a time. */
2918 if (GET_MODE (object
) != BLKmode
2919 && GET_CODE (size
) == CONST_INT
2920 && INTVAL (size
) == (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (object
)))
2921 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2924 object
= protect_from_queue (object
, 1);
2925 size
= protect_from_queue (size
, 0);
2927 if (GET_CODE (size
) == CONST_INT
2928 && CLEAR_BY_PIECES_P (INTVAL (size
), align
))
2929 clear_by_pieces (object
, INTVAL (size
), align
);
2930 else if (clear_storage_via_clrstr (object
, size
, align
))
2933 retval
= clear_storage_via_libcall (object
, size
);
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */
2943 clear_storage_via_clrstr (object
, size
, align
)
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */
2951 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2952 enum machine_mode mode
;
2954 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2955 mode
= GET_MODE_WIDER_MODE (mode
))
2957 enum insn_code code
= clrstr_optab
[(int) mode
];
2958 insn_operand_predicate_fn pred
;
2960 if (code
!= CODE_FOR_nothing
2961 /* We don't need MODE to be narrower than
2962 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2963 the mode mask, as it is returned by the macro, it will
2964 definitely be less than the actual mode mask. */
2965 && ((GET_CODE (size
) == CONST_INT
2966 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2967 <= (GET_MODE_MASK (mode
) >> 1)))
2968 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2969 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2970 || (*pred
) (object
, BLKmode
))
2971 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2972 || (*pred
) (opalign
, VOIDmode
)))
2975 rtx last
= get_last_insn ();
2978 op1
= convert_to_mode (mode
, size
, 1);
2979 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2980 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2981 op1
= copy_to_mode_reg (mode
, op1
);
2983 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2990 delete_insns_since (last
);
/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */
3001 clear_storage_via_libcall (object
, size
)
3004 tree call_expr
, arg_list
, fn
, object_tree
, size_tree
;
3005 enum machine_mode size_mode
;
  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
3029 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
3031 if (TARGET_MEM_FUNCTIONS
)
3032 size_mode
= TYPE_MODE (sizetype
);
3034 size_mode
= TYPE_MODE (unsigned_type_node
);
3035 size
= convert_to_mode (size_mode
, size
, 1);
3036 size
= copy_to_mode_reg (size_mode
, size
);
  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */
3046 object_tree
= make_tree (ptr_type_node
, object
);
3047 if (TARGET_MEM_FUNCTIONS
)
3048 size_tree
= make_tree (sizetype
, size
);
3050 size_tree
= make_tree (unsigned_type_node
, size
);
3052 fn
= clear_storage_libcall_fn (true);
3053 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
3054 if (TARGET_MEM_FUNCTIONS
)
3055 arg_list
= tree_cons (NULL_TREE
, integer_zero_node
, arg_list
);
3056 arg_list
= tree_cons (NULL_TREE
, object_tree
, arg_list
);
3058 /* Now we have to build up the CALL_EXPR itself. */
3059 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
3060 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
3061 call_expr
, arg_list
, NULL_TREE
);
3062 TREE_SIDE_EFFECTS (call_expr
) = 1;
3064 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
3069 if (RTX_UNCHANGING_P (object
))
3070 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
3072 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */
3079 static GTY(()) tree block_clear_fn
;
3082 clear_storage_libcall_fn (for_call
)
3085 static bool emitted_extern
;
3086 tree fn
= block_clear_fn
, args
;
3090 if (TARGET_MEM_FUNCTIONS
)
3092 fn
= get_identifier ("memset");
3093 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
3094 integer_type_node
, sizetype
,
3099 fn
= get_identifier ("bzero");
3100 args
= build_function_type_list (void_type_node
, ptr_type_node
,
3101 unsigned_type_node
, NULL_TREE
);
3104 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
3105 DECL_EXTERNAL (fn
) = 1;
3106 TREE_PUBLIC (fn
) = 1;
3107 DECL_ARTIFICIAL (fn
) = 1;
3108 TREE_NOTHROW (fn
) = 1;
3110 block_clear_fn
= fn
;
3113 if (for_call
&& !emitted_extern
)
3115 emitted_extern
= true;
3116 make_decl_rtl (fn
, NULL
);
3117 assemble_external (fn
);
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
3131 emit_move_insn (x
, y
)
3134 enum machine_mode mode
= GET_MODE (x
);
3135 rtx y_cst
= NULL_RTX
;
3138 x
= protect_from_queue (x
, 1);
3139 y
= protect_from_queue (y
, 0);
3141 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
3144 /* Never force constant_p_rtx to memory. */
3145 if (GET_CODE (y
) == CONSTANT_P_RTX
)
3147 else if (CONSTANT_P (y
))
3150 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
3151 && (last_insn
= compress_float_constant (x
, y
)))
3154 if (!LEGITIMATE_CONSTANT_P (y
))
3157 y
= force_const_mem (mode
, y
);
3159 /* If the target's cannot_force_const_mem prevented the spill,
3160 assume that the target's move expanders will also take care
3161 of the non-legitimate constant. */
3167 /* If X or Y are memory references, verify that their addresses are valid
3169 if (GET_CODE (x
) == MEM
3170 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
3171 && ! push_operand (x
, GET_MODE (x
)))
3173 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
3174 x
= validize_mem (x
);
3176 if (GET_CODE (y
) == MEM
3177 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
3179 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
3180 y
= validize_mem (y
);
3182 if (mode
== BLKmode
)
3185 last_insn
= emit_move_insn_1 (x
, y
);
3187 if (y_cst
&& GET_CODE (x
) == REG
)
3188 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */
3198 emit_move_insn_1 (x
, y
)
3201 enum machine_mode mode
= GET_MODE (x
);
3202 enum machine_mode submode
;
3203 enum mode_class
class = GET_MODE_CLASS (mode
);
3205 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
3208 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
3210 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
3212 /* Expand complex moves by moving real part and imag part, if possible. */
3213 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
3214 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
3215 && (mov_optab
->handlers
[(int) submode
].insn_code
3216 != CODE_FOR_nothing
))
3218 /* Don't split destination if it is a stack push. */
3219 int stack
= push_operand (x
, GET_MODE (x
));
3221 #ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
3225 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
3226 != GET_MODE_SIZE (submode
)))
3229 HOST_WIDE_INT offset1
, offset2
;
3231 /* Do not use anti_adjust_stack, since we don't want to update
3232 stack_pointer_delta. */
3233 temp
= expand_binop (Pmode
,
3234 #ifdef STACK_GROWS_DOWNWARD
3242 (GET_MODE_SIZE (GET_MODE (x
)))),
3243 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3245 if (temp
!= stack_pointer_rtx
)
3246 emit_move_insn (stack_pointer_rtx
, temp
);
3248 #ifdef STACK_GROWS_DOWNWARD
3250 offset2
= GET_MODE_SIZE (submode
);
3252 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
3253 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
3254 + GET_MODE_SIZE (submode
));
3257 emit_move_insn (change_address (x
, submode
,
3258 gen_rtx_PLUS (Pmode
,
3260 GEN_INT (offset1
))),
3261 gen_realpart (submode
, y
));
3262 emit_move_insn (change_address (x
, submode
,
3263 gen_rtx_PLUS (Pmode
,
3265 GEN_INT (offset2
))),
3266 gen_imagpart (submode
, y
));
          /* If this is a stack, push the highpart first, so it
             will be in the argument order.

             In that case, change_address is used only to convert
             the mode, not to change the address.  */

          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
3279 #ifdef STACK_GROWS_DOWNWARD
3280 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3281 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3282 gen_imagpart (submode
, y
)));
3283 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3284 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3285 gen_realpart (submode
, y
)));
3287 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3288 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3289 gen_realpart (submode
, y
)));
3290 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3291 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3292 gen_imagpart (submode
, y
)));
3297 rtx realpart_x
, realpart_y
;
3298 rtx imagpart_x
, imagpart_y
;
      /* If this is a complex value with each part being smaller than a
         word, the usual calling sequence will likely pack the pieces into
         a single register.  Unfortunately, SUBREG of hard registers only
         deals in terms of words, so we have a problem converting input
         arguments to the CONCAT of two registers that is used elsewhere
         for complex values.  If this is before reload, we can copy it into
         memory and reload.  FIXME, we should see about using extract and
         insert on integer registers, but complex short and complex char
         variables should be rarely used.  */
3309 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
3310 && (reload_in_progress
| reload_completed
) == 0)
3313 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
3315 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
3317 if (packed_dest_p
|| packed_src_p
)
3319 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
3320 ? MODE_FLOAT
: MODE_INT
);
3322 enum machine_mode reg_mode
3323 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
3325 if (reg_mode
!= BLKmode
)
3327 rtx mem
= assign_stack_temp (reg_mode
,
3328 GET_MODE_SIZE (mode
), 0);
3329 rtx cmem
= adjust_address (mem
, mode
, 0);
3332 = N_("function using short complex types cannot be inline");
3336 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
3338 emit_move_insn_1 (cmem
, y
);
3339 return emit_move_insn_1 (sreg
, mem
);
3343 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
3345 emit_move_insn_1 (mem
, sreg
);
3346 return emit_move_insn_1 (x
, cmem
);
3352 realpart_x
= gen_realpart (submode
, x
);
3353 realpart_y
= gen_realpart (submode
, y
);
3354 imagpart_x
= gen_imagpart (submode
, x
);
3355 imagpart_y
= gen_imagpart (submode
, y
);
3357 /* Show the output dies here. This is necessary for SUBREGs
3358 of pseudos since we cannot track their lifetimes correctly;
3359 hard regs shouldn't appear here except as return values.
3360 We never want to emit such a clobber after reload. */
3362 && ! (reload_in_progress
|| reload_completed
)
3363 && (GET_CODE (realpart_x
) == SUBREG
3364 || GET_CODE (imagpart_x
) == SUBREG
))
3365 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3367 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3368 (realpart_x
, realpart_y
));
3369 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3370 (imagpart_x
, imagpart_y
));
3373 return get_last_insn ();
  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
3379 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3386 #ifdef PUSH_ROUNDING
3388 /* If X is a push on the stack, do the push now and replace
3389 X with a reference to the stack pointer. */
3390 if (push_operand (x
, GET_MODE (x
)))
3395 /* Do not use anti_adjust_stack, since we don't want to update
3396 stack_pointer_delta. */
3397 temp
= expand_binop (Pmode
,
3398 #ifdef STACK_GROWS_DOWNWARD
3406 (GET_MODE_SIZE (GET_MODE (x
)))),
3407 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3409 if (temp
!= stack_pointer_rtx
)
3410 emit_move_insn (stack_pointer_rtx
, temp
);
3412 code
= GET_CODE (XEXP (x
, 0));
3414 /* Just hope that small offsets off SP are OK. */
3415 if (code
== POST_INC
)
3416 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3417 GEN_INT (-((HOST_WIDE_INT
)
3418 GET_MODE_SIZE (GET_MODE (x
)))));
3419 else if (code
== POST_DEC
)
3420 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3421 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3423 temp
= stack_pointer_rtx
;
3425 x
= change_address (x
, VOIDmode
, temp
);
3429 /* If we are in reload, see if either operand is a MEM whose address
3430 is scheduled for replacement. */
3431 if (reload_in_progress
&& GET_CODE (x
) == MEM
3432 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3433 x
= replace_equiv_address_nv (x
, inner
);
3434 if (reload_in_progress
&& GET_CODE (y
) == MEM
3435 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3436 y
= replace_equiv_address_nv (y
, inner
);
3442 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3445 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3446 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3448 /* If we can't get a part of Y, put Y into memory if it is a
3449 constant. Otherwise, force it into a register. If we still
3450 can't get a part of Y, abort. */
3451 if (ypart
== 0 && CONSTANT_P (y
))
3453 y
= force_const_mem (mode
, y
);
3454 ypart
= operand_subword (y
, i
, 1, mode
);
3456 else if (ypart
== 0)
3457 ypart
= operand_subword_force (y
, i
, mode
);
3459 if (xpart
== 0 || ypart
== 0)
3462 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3464 last_insn
= emit_move_insn (xpart
, ypart
);
3470 /* Show the output dies here. This is necessary for SUBREGs
3471 of pseudos since we cannot track their lifetimes correctly;
3472 hard regs shouldn't appear here except as return values.
3473 We never want to emit such a clobber after reload. */
3475 && ! (reload_in_progress
|| reload_completed
)
3476 && need_clobber
!= 0)
3477 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */
3492 compress_float_constant (x
, y
)
3495 enum machine_mode dstmode
= GET_MODE (x
);
3496 enum machine_mode orig_srcmode
= GET_MODE (y
);
3497 enum machine_mode srcmode
;
3500 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3502 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3503 srcmode
!= orig_srcmode
;
3504 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3507 rtx trunc_y
, last_insn
;
3509 /* Skip if the target can't extend this way. */
3510 ic
= can_extend_p (dstmode
, srcmode
, 0);
3511 if (ic
== CODE_FOR_nothing
)
3514 /* Skip if the narrowed value isn't exact. */
3515 if (! exact_real_truncate (srcmode
, &r
))
3518 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3520 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3522 /* Skip if the target needs extra instructions to perform
3524 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
3527 else if (float_extend_from_mem
[dstmode
][srcmode
])
3528 trunc_y
= validize_mem (force_const_mem (srcmode
, trunc_y
));
3532 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3533 last_insn
= get_last_insn ();
3535 if (GET_CODE (x
) == REG
)
3536 REG_NOTES (last_insn
)
3537 = gen_rtx_EXPR_LIST (REG_EQUAL
, y
, REG_NOTES (last_insn
));
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
3557 push_block (size
, extra
, below
)
3563 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3564 if (CONSTANT_P (size
))
3565 anti_adjust_stack (plus_constant (size
, extra
));
3566 else if (GET_CODE (size
) == REG
&& extra
== 0)
3567 anti_adjust_stack (size
);
3570 temp
= copy_to_mode_reg (Pmode
, size
);
3572 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3573 temp
, 0, OPTAB_LIB_WIDEN
);
3574 anti_adjust_stack (temp
);
3577 #ifndef STACK_GROWS_DOWNWARD
3583 temp
= virtual_outgoing_args_rtx
;
3584 if (extra
!= 0 && below
)
3585 temp
= plus_constant (temp
, extra
);
3589 if (GET_CODE (size
) == CONST_INT
)
3590 temp
= plus_constant (virtual_outgoing_args_rtx
,
3591 -INTVAL (size
) - (below
? 0 : extra
));
3592 else if (extra
!= 0 && !below
)
3593 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3594 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3596 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3597 negate_rtx (Pmode
, size
));
3600 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
3603 #ifdef PUSH_ROUNDING
3605 /* Emit single push insn. */
3608 emit_single_push_insn (mode
, x
, type
)
3610 enum machine_mode mode
;
3614 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3616 enum insn_code icode
;
3617 insn_operand_predicate_fn pred
;
3619 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3620 /* If there is push pattern, use it. Otherwise try old way of throwing
3621 MEM representing push operation to move expander. */
3622 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3623 if (icode
!= CODE_FOR_nothing
)
3625 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3626 && !((*pred
) (x
, mode
))))
3627 x
= force_reg (mode
, x
);
3628 emit_insn (GEN_FCN (icode
) (x
));
3631 if (GET_MODE_SIZE (mode
) == rounded_size
)
3632 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3635 #ifdef STACK_GROWS_DOWNWARD
3636 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3637 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3639 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3640 GEN_INT (rounded_size
));
3642 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3645 dest
= gen_rtx_MEM (mode
, dest_addr
);
3649 set_mem_attributes (dest
, type
, 1);
3651 if (flag_optimize_sibling_calls
)
3652 /* Function incoming arguments may overlap with sibling call
3653 outgoing arguments and we cannot allow reordering of reads
3654 from function arguments with stores to outgoing arguments
3655 of sibling calls. */
3656 set_mem_alias_set (dest
, 0);
3658 emit_move_insn (dest
, x
);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
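
/* Worked example of the PARTIAL rounding described above (illustrative):
   with UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64, a PARTIAL of 3 words
   (12 bytes) reduces the stack space pushed by only 8 bytes, since the
   reduction is rounded down to a multiple of PARM_BOUNDARY.  */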
3695 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
3696 args_addr
, args_so_far
, reg_parm_stack_space
,
3699 enum machine_mode mode
;
3708 int reg_parm_stack_space
;
3712 enum direction stack_direction
3713 #ifdef STACK_GROWS_DOWNWARD
  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
3722 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3724 /* Invert direction if stack is post-decrement.
3726 if (STACK_PUSH_CODE
== POST_DEC
)
3727 if (where_pad
!= none
)
3728 where_pad
= (where_pad
== downward
? upward
: downward
);
3730 xinner
= x
= protect_from_queue (x
, 0);
3732 if (mode
== BLKmode
)
3734 /* Copy a block into the stack, entirely or partially. */
3737 int used
= partial
* UNITS_PER_WORD
;
3738 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3746 /* USED is now the # of bytes we need not copy to the stack
3747 because registers will take care of them. */
3750 xinner
= adjust_address (xinner
, BLKmode
, used
);
3752 /* If the partial register-part of the arg counts in its stack size,
3753 skip the part of stack space corresponding to the registers.
3754 Otherwise, start copying to the beginning of the stack space,
3755 by setting SKIP to 0. */
3756 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3758 #ifdef PUSH_ROUNDING
3759 /* Do it with several push insns if that doesn't take lots of insns
3760 and if there is no difficulty with push insns that skip bytes
3761 on the stack for alignment purposes. */
3764 && GET_CODE (size
) == CONST_INT
3766 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3767 /* Here we avoid the case of a structure whose weak alignment
3768 forces many pushes of a small amount of data,
3769 and such small pushes do rounding that causes trouble. */
3770 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3771 || align
>= BIGGEST_ALIGNMENT
3772 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3773 == (align
/ BITS_PER_UNIT
)))
3774 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3776 /* Push padding now if padding above and stack grows down,
3777 or if padding below and stack grows up.
3778 But if space already allocated, this has already been done. */
3779 if (extra
&& args_addr
== 0
3780 && where_pad
!= none
&& where_pad
!= stack_direction
)
3781 anti_adjust_stack (GEN_INT (extra
));
3783 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
);
3786 #endif /* PUSH_ROUNDING */
3790 /* Otherwise make space on the stack and copy the data
3791 to the address of that space. */
3793 /* Deduct words put into registers from the size we must copy. */
3796 if (GET_CODE (size
) == CONST_INT
)
3797 size
= GEN_INT (INTVAL (size
) - used
);
3799 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3800 GEN_INT (used
), NULL_RTX
, 0,
3804 /* Get the address of the stack space.
3805 In this case, we do not deal with EXTRA separately.
3806 A single stack adjust will do. */
3809 temp
= push_block (size
, extra
, where_pad
== downward
);
3812 else if (GET_CODE (args_so_far
) == CONST_INT
)
3813 temp
= memory_address (BLKmode
,
3814 plus_constant (args_addr
,
3815 skip
+ INTVAL (args_so_far
)));
3817 temp
= memory_address (BLKmode
,
3818 plus_constant (gen_rtx_PLUS (Pmode
,
3823 if (!ACCUMULATE_OUTGOING_ARGS
)
3825 /* If the source is referenced relative to the stack pointer,
3826 copy it to another register to stabilize it. We do not need
3827 to do this if we know that we won't be changing sp. */
3829 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3830 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3831 temp
= copy_to_reg (temp
);
3834 target
= gen_rtx_MEM (BLKmode
, temp
);
3838 set_mem_attributes (target
, type
, 1);
3839 /* Function incoming arguments may overlap with sibling call
3840 outgoing arguments and we cannot allow reordering of reads
3841 from function arguments with stores to outgoing arguments
3842 of sibling calls. */
3843 set_mem_alias_set (target
, 0);
3846 /* ALIGN may well be better aligned than TYPE, e.g. due to
3847 PARM_BOUNDARY. Assume the caller isn't lying. */
3848 set_mem_align (target
, align
);
3850 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3853 else if (partial
> 0)
3855 /* Scalar partly in registers. */
3857 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3860 /* # words of start of argument
3861 that we must make space for but need not store. */
3862 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3863 int args_offset
= INTVAL (args_so_far
);
3866 /* Push padding now if padding above and stack grows down,
3867 or if padding below and stack grows up.
3868 But if space already allocated, this has already been done. */
3869 if (extra
&& args_addr
== 0
3870 && where_pad
!= none
&& where_pad
!= stack_direction
)
3871 anti_adjust_stack (GEN_INT (extra
));
3873 /* If we make space by pushing it, we might as well push
3874 the real data. Otherwise, we can leave OFFSET nonzero
3875 and leave the space uninitialized. */
3879 /* Now NOT_STACK gets the number of words that we don't need to
3880 allocate on the stack. */
3881 not_stack
= partial
- offset
;
3883 /* If the partial register-part of the arg counts in its stack size,
3884 skip the part of stack space corresponding to the registers.
3885 Otherwise, start copying to the beginning of the stack space,
3886 by setting SKIP to 0. */
3887 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3889 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3890 x
= validize_mem (force_const_mem (mode
, x
));
3892 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3893 SUBREGs of such registers are not allowed. */
3894 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3895 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3896 x
= copy_to_reg (x
);
3898 /* Loop over all the words allocated on the stack for this arg. */
3899 /* We can do it by words, because any scalar bigger than a word
3900 has a size a multiple of a word. */
3901 #ifndef PUSH_ARGS_REVERSED
3902 for (i
= not_stack
; i
< size
; i
++)
3904 for (i
= size
- 1; i
>= not_stack
; i
--)
3906 if (i
>= not_stack
+ offset
)
3907 emit_push_insn (operand_subword_force (x
, i
, mode
),
3908 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3910 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3912 reg_parm_stack_space
, alignment_pad
);
3919 /* Push padding now if padding above and stack grows down,
3920 or if padding below and stack grows up.
3921 But if space already allocated, this has already been done. */
3922 if (extra
&& args_addr
== 0
3923 && where_pad
!= none
&& where_pad
!= stack_direction
)
3924 anti_adjust_stack (GEN_INT (extra
));
3926 #ifdef PUSH_ROUNDING
3927 if (args_addr
== 0 && PUSH_ARGS
)
3928 emit_single_push_insn (mode
, x
, type
);
3932 if (GET_CODE (args_so_far
) == CONST_INT
)
3934 = memory_address (mode
,
3935 plus_constant (args_addr
,
3936 INTVAL (args_so_far
)));
3938 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3940 dest
= gen_rtx_MEM (mode
, addr
);
3943 set_mem_attributes (dest
, type
, 1);
3944 /* Function incoming arguments may overlap with sibling call
3945 outgoing arguments and we cannot allow reordering of reads
3946 from function arguments with stores to outgoing arguments
3947 of sibling calls. */
3948 set_mem_alias_set (dest
, 0);
3951 emit_move_insn (dest
, x
);
3955 /* If part should go in registers, copy that part
3956 into the appropriate registers. Do this now, at the end,
3957 since mem-to-mem copies above may do function calls. */
3958 if (partial
> 0 && reg
!= 0)
3960 /* Handle calls that pass values in multiple non-contiguous locations.
3961 The Irix 6 ABI has examples of this. */
3962 if (GET_CODE (reg
) == PARALLEL
)
3963 emit_group_load (reg
, x
, -1); /* ??? size? */
3965 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3968 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3969 anti_adjust_stack (GEN_INT (extra
));
3971 if (alignment_pad
&& args_addr
== 0)
3972 anti_adjust_stack (alignment_pad
);
3975 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3983 /* Only registers can be subtargets. */
3984 || GET_CODE (x
) != REG
3985 /* If the register is readonly, it can't be set more than once. */
3986 || RTX_UNCHANGING_P (x
)
3987 /* Don't use hard regs to avoid extending their life. */
3988 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3989 /* Avoid subtargets inside loops,
3990 since they hide some invariant expressions. */
3991 || preserve_subexpressions_p ())
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */
4007 expand_assignment (to
, from
, want_value
, suggest_reg
)
4010 int suggest_reg ATTRIBUTE_UNUSED
;
4015 /* Don't crash if the lhs of the assignment was erroneous. */
4017 if (TREE_CODE (to
) == ERROR_MARK
)
4019 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4020 return want_value
? result
: NULL_RTX
;
4023 /* Assignment of a structure component needs special treatment
4024 if the structure component's rtx is not simply a MEM.
4025 Assignment of an array element at a constant index, and assignment of
4026 an array element in an unaligned packed structure field, has the same
4029 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
4030 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
)
4032 enum machine_mode mode1
;
4033 HOST_WIDE_INT bitsize
, bitpos
;
4041 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
4042 &unsignedp
, &volatilep
);
4044 /* If we are going to use store_bit_field and extract_bit_field,
4045 make sure to_rtx will be safe for multiple use. */
4047 if (mode1
== VOIDmode
&& want_value
)
4048 tem
= stabilize_reference (tem
);
4050 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
4054 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4056 if (GET_CODE (to_rtx
) != MEM
)
4059 #ifdef POINTERS_EXTEND_UNSIGNED
4060 if (GET_MODE (offset_rtx
) != Pmode
)
4061 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4063 if (GET_MODE (offset_rtx
) != ptr_mode
)
4064 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4067 /* A constant address in TO_RTX can have VOIDmode, we must not try
4068 to call force_reg for that case. Avoid that case. */
4069 if (GET_CODE (to_rtx
) == MEM
4070 && GET_MODE (to_rtx
) == BLKmode
4071 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
4073 && (bitpos
% bitsize
) == 0
4074 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4075 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
4077 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4081 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4082 highest_pow2_factor_for_type (TREE_TYPE (to
),
4086 if (GET_CODE (to_rtx
) == MEM
)
4088 /* If the field is at offset zero, we could have been given the
4089 DECL_RTX of the parent struct. Don't munge it. */
4090 to_rtx
= shallow_copy_rtx (to_rtx
);
4092 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
4095 /* Deal with volatile and readonly fields. The former is only done
4096 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4097 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
4099 if (to_rtx
== orig_to_rtx
)
4100 to_rtx
= copy_rtx (to_rtx
);
4101 MEM_VOLATILE_P (to_rtx
) = 1;
4104 if (TREE_CODE (to
) == COMPONENT_REF
4105 && TREE_READONLY (TREE_OPERAND (to
, 1)))
4107 if (to_rtx
== orig_to_rtx
)
4108 to_rtx
= copy_rtx (to_rtx
);
4109 RTX_UNCHANGING_P (to_rtx
) = 1;
4112 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
4114 if (to_rtx
== orig_to_rtx
)
4115 to_rtx
= copy_rtx (to_rtx
);
4116 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4119 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
4121 /* Spurious cast for HPUX compiler. */
4122 ? ((enum machine_mode
)
4123 TYPE_MODE (TREE_TYPE (to
)))
4125 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
4127 preserve_temp_slots (result
);
4131 /* If the value is meaningful, convert RESULT to the proper mode.
4132 Otherwise, return nothing. */
4133 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
4134 TYPE_MODE (TREE_TYPE (from
)),
4136 TREE_UNSIGNED (TREE_TYPE (to
)))
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
4150 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
4151 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
4152 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
4153 && GET_CODE (DECL_RTL (to
)) == REG
))
4158 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4160 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4162 /* Handle calls that return values in multiple non-contiguous locations.
4163 The Irix 6 ABI has examples of this. */
4164 if (GET_CODE (to_rtx
) == PARALLEL
)
4165 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)));
4166 else if (GET_MODE (to_rtx
) == BLKmode
)
4167 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
4170 #ifdef POINTERS_EXTEND_UNSIGNED
4171 if (POINTER_TYPE_P (TREE_TYPE (to
))
4172 && GET_MODE (to_rtx
) != GET_MODE (value
))
4173 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
4175 emit_move_insn (to_rtx
, value
);
4177 preserve_temp_slots (to_rtx
);
4180 return want_value
? to_rtx
: NULL_RTX
;
4183 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4184 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4187 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4189 /* Don't move directly into a return register. */
4190 if (TREE_CODE (to
) == RESULT_DECL
4191 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
4196 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
4198 if (GET_CODE (to_rtx
) == PARALLEL
)
4199 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)));
4201 emit_move_insn (to_rtx
, temp
);
4203 preserve_temp_slots (to_rtx
);
4206 return want_value
? to_rtx
: NULL_RTX
;
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
4212 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
4213 && current_function_returns_struct
4214 && !current_function_returns_pcc_struct
)
4219 size
= expr_size (from
);
4220 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4222 if (TARGET_MEM_FUNCTIONS
)
4223 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
4224 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
4225 XEXP (from_rtx
, 0), Pmode
,
4226 convert_to_mode (TYPE_MODE (sizetype
),
4227 size
, TREE_UNSIGNED (sizetype
)),
4228 TYPE_MODE (sizetype
));
4230 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
4231 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
4232 XEXP (to_rtx
, 0), Pmode
,
4233 convert_to_mode (TYPE_MODE (integer_type_node
),
4235 TREE_UNSIGNED (integer_type_node
)),
4236 TYPE_MODE (integer_type_node
));
4238 preserve_temp_slots (to_rtx
);
4241 return want_value
? to_rtx
: NULL_RTX
;
4244 /* Compute FROM and store the value in the rtx we got. */
4247 result
= store_expr (from
, to_rtx
, want_value
);
4248 preserve_temp_slots (result
);
4251 return want_value
? result
: NULL_RTX
;
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be done?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */
rtx
store_expr (exp, target, want_value)
     tree exp;
     rtx target;
     int want_value;
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      target = protect_from_queue (target, 1);
      do_pending_stack_adjust ();
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_jump_insn (gen_jump (lab2));
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_label (lab2);

      return want_value ? target : NULL_RTX;
    }
4326 else if (queued_subexp_p (target
))
4327 /* If target contains a postincrement, let's not risk
4328 using it as the place to generate the rhs. */
4330 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4332 /* Expand EXP into a new pseudo. */
4333 temp
= gen_reg_rtx (GET_MODE (target
));
4334 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
4337 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
4339 /* If target is volatile, ANSI requires accessing the value
4340 *from* the target, if it is accessed. So make that happen.
4341 In no case return the target itself. */
4342 if (! MEM_VOLATILE_P (target
) && want_value
)
4343 dont_return_target
= 1;
4345 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
4346 && GET_MODE (target
) != BLKmode
)
4347 /* If target is in memory and caller wants value in a register instead,
4348 arrange that. Pass TARGET as target for expand_expr so that,
4349 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4350 We know expand_expr will not use the target in that case.
4351 Don't do this if TARGET is volatile because we are supposed
4352 to write it and then read it. */
4354 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4355 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4357 /* If TEMP is already in the desired TARGET, only copy it from
4358 memory and don't store it there again. */
4360 || (rtx_equal_p (temp
, target
)
4361 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4362 dont_store_target
= 1;
4363 temp
= copy_to_reg (temp
);
4365 dont_return_target
= 1;
4367 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4368 /* If this is a scalar in a register that is stored in a wider mode
4369 than the declared mode, compute the result into its declared mode
4370 and then convert to the wider mode. Our value is the computed
4373 rtx inner_target
= 0;
4375 /* If we don't want a value, we can do the conversion inside EXP,
4376 which will often result in some optimizations. Do the conversion
4377 in two steps: first change the signedness, if needed, then
4378 the extend. But don't do this if the type of EXP is a subtype
4379 of something else since then the conversion might involve
4380 more than just converting modes. */
4381 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4382 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4384 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4385 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4387 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4388 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4390 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4391 (GET_MODE (SUBREG_REG (target
)),
4392 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4395 inner_target
= SUBREG_REG (target
);
4398 temp
= expand_expr (exp
, inner_target
, VOIDmode
, 0);
4400 /* If TEMP is a volatile MEM and we want a result value, make
4401 the access now so it gets done only once. Likewise if
4402 it contains TARGET. */
4403 if (GET_CODE (temp
) == MEM
&& want_value
4404 && (MEM_VOLATILE_P (temp
)
4405 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
4406 temp
= copy_to_reg (temp
);
4408 /* If TEMP is a VOIDmode constant, use convert_modes to make
4409 sure that we properly convert it. */
4410 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4412 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4413 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4414 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4415 GET_MODE (target
), temp
,
4416 SUBREG_PROMOTED_UNSIGNED_P (target
));
4419 convert_move (SUBREG_REG (target
), temp
,
4420 SUBREG_PROMOTED_UNSIGNED_P (target
));
4422 /* If we promoted a constant, change the mode back down to match
4423 target. Otherwise, the caller might get confused by a result whose
4424 mode is larger than expected. */
4426 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
))
4428 if (GET_MODE (temp
) != VOIDmode
)
4430 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4431 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4432 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4433 SUBREG_PROMOTED_UNSIGNED_P (target
));
4436 temp
= convert_modes (GET_MODE (target
),
4437 GET_MODE (SUBREG_REG (target
)),
4438 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4441 return want_value
? temp
: NULL_RTX
;
4445 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4446 /* Return TARGET if it's a specified hardware register.
4447 If TARGET is a volatile mem ref, either return TARGET
4448 or return a reg copied *from* TARGET; ANSI requires this.
4450 Otherwise, if TEMP is not TARGET, return TEMP
4451 if it is constant (for efficiency),
4452 or if we really want the correct value. */
4453 if (!(target
&& GET_CODE (target
) == REG
4454 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4455 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4456 && ! rtx_equal_p (temp
, target
)
4457 && (CONSTANT_P (temp
) || want_value
))
4458 dont_return_target
= 1;
4461 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4462 the same as that of TARGET, adjust the constant. This is needed, for
4463 example, in case it is a CONST_DOUBLE and we want only a word-sized
4465 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4466 && TREE_CODE (exp
) != ERROR_MARK
4467 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4468 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4469 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
4471 /* If value was not generated in the target, store it there.
4472 Convert the value to TARGET's type first if necessary.
4473 If TEMP and TARGET compare equal according to rtx_equal_p, but
4474 one or both of them are volatile memory refs, we have to distinguish
4476 - expand_expr has used TARGET. In this case, we must not generate
4477 another copy. This can be detected by TARGET being equal according
4479 - expand_expr has not used TARGET - that means that the source just
4480 happens to have the same RTX form. Since temp will have been created
4481 by expand_expr, it will compare unequal according to == .
4482 We must generate a copy in this case, to reach the correct number
4483 of volatile memory references. */
4485 if ((! rtx_equal_p (temp
, target
)
4486 || (temp
!= target
&& (side_effects_p (temp
)
4487 || side_effects_p (target
))))
4488 && TREE_CODE (exp
) != ERROR_MARK
4489 && ! dont_store_target
4490 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4491 but TARGET is not valid memory reference, TEMP will differ
4492 from TARGET although it is really the same location. */
4493 && (TREE_CODE_CLASS (TREE_CODE (exp
)) != 'd'
4494 || target
!= DECL_RTL_IF_SET (exp
))
4495 /* If there's nothing to copy, don't bother. Don't call expr_size
4496 unless necessary, because some front-ends (C++) expr_size-hook
4497 aborts on objects that are not supposed to be bit-copied or
4499 && expr_size (exp
) != const0_rtx
)
4501 target
= protect_from_queue (target
, 1);
4502 if (GET_MODE (temp
) != GET_MODE (target
)
4503 && GET_MODE (temp
) != VOIDmode
)
4505 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4506 if (dont_return_target
)
4508 /* In this case, we will return TEMP,
4509 so make sure it has the proper mode.
4510 But don't forget to store the value into TARGET. */
4511 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4512 emit_move_insn (target
, temp
);
4515 convert_move (target
, temp
, unsignedp
);
4518 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4520 /* Handle copying a string constant into an array. The string
4521 constant may be shorter than the array. So copy just the string's
4522 actual length, and clear the rest. First get the size of the data
4523 type of the string, which is actually the size of the target. */
4524 rtx size
= expr_size (exp
);
4526 if (GET_CODE (size
) == CONST_INT
4527 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4528 emit_block_move (target
, temp
, size
, BLOCK_OP_NORMAL
);
4531 /* Compute the size of the data to copy from the string. */
4533 = size_binop (MIN_EXPR
,
4534 make_tree (sizetype
, size
),
4535 size_int (TREE_STRING_LENGTH (exp
)));
4536 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
4540 /* Copy that much. */
4541 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
, 0);
4542 emit_block_move (target
, temp
, copy_size_rtx
, BLOCK_OP_NORMAL
);
4544 /* Figure out how much is left in TARGET that we have to clear.
4545 Do all calculations in ptr_mode. */
4546 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4548 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4549 target
= adjust_address (target
, BLKmode
,
4550 INTVAL (copy_size_rtx
));
4554 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4555 copy_size_rtx
, NULL_RTX
, 0,
4558 #ifdef POINTERS_EXTEND_UNSIGNED
4559 if (GET_MODE (copy_size_rtx
) != Pmode
)
4560 copy_size_rtx
= convert_memory_address (Pmode
,
4564 target
= offset_address (target
, copy_size_rtx
,
4565 highest_pow2_factor (copy_size
));
4566 label
= gen_label_rtx ();
4567 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4568 GET_MODE (size
), 0, label
);
4571 if (size
!= const0_rtx
)
4572 clear_storage (target
, size
);
4578 /* Handle calls that return values in multiple non-contiguous locations.
4579 The Irix 6 ABI has examples of this. */
4580 else if (GET_CODE (target
) == PARALLEL
)
4581 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)));
4582 else if (GET_MODE (temp
) == BLKmode
)
4583 emit_block_move (target
, temp
, expr_size (exp
), BLOCK_OP_NORMAL
);
4585 emit_move_insn (target
, temp
);
4588 /* If we don't want a value, return NULL_RTX. */
4592 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4593 ??? The latter test doesn't seem to make sense. */
4594 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4597 /* Return TARGET itself if it is a hard register. */
4598 else if (want_value
&& GET_MODE (target
) != BLKmode
4599 && ! (GET_CODE (target
) == REG
4600 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4601 return copy_to_reg (target
);
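/* Illustrative aside (not from the original sources): the STRING_CST branch
   of store_expr above copies only the string's actual bytes and then clears
   whatever is left of the target.  This is the same copy-then-clear pattern
   in plain C; the names and sizes are hypothetical, and the sketch is kept
   under #if 0 so it is never compiled.  */
#if 0
#include <string.h>

static void
init_char_array_example (char *dst, size_t dst_size,
                         const char *src, size_t src_len)
{
  size_t copy = src_len < dst_size ? src_len : dst_size;

  memcpy (dst, src, copy);                      /* copy the string data */
  if (copy < dst_size)
    memset (dst + copy, 0, dst_size - copy);    /* clear the remainder */
}
#endif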
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
        is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
           elt = TREE_CHAIN (elt))
        if (!is_zeros_p (TREE_VALUE (elt)))
          return 0;
      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        if (! is_zeros_p (TREE_VALUE (elt)))
          return 0;
      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        {
          /* If there are no ranges of true bits, it is all zero.  */
          return elt == NULL_TREE;
        }
      for (; elt; elt = TREE_CHAIN (elt))
        {
          /* We do not handle the case where the index is a RANGE_EXPR,
             so the statistic will be somewhat inaccurate.
             We do make a more accurate count in store_constructor itself,
             so since this function is only used for nested array elements,
             this should be close enough.  */
          if (mostly_zeros_p (TREE_VALUE (elt)))
            zeros++;
          elts++;
        }

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
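/* Illustrative aside (not from the original sources): the 3/4 heuristic used
   by mostly_zeros_p above, applied to a plain array of integers.  The names
   are hypothetical; kept under #if 0 so it is never compiled.  */
#if 0
static int
mostly_zeros_example (const int *vals, int n)
{
  int i, zeros = 0;

  for (i = 0; i < n; i++)
    if (vals[i] == 0)
      zeros++;

  /* Nonzero when at least 75% of the elements are zero.  */
  return 4 * zeros >= 3 * n;
}
#endif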
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
4700 store_constructor_field (target
, bitsize
, bitpos
, mode
, exp
, type
, cleared
,
4703 unsigned HOST_WIDE_INT bitsize
;
4704 HOST_WIDE_INT bitpos
;
4705 enum machine_mode mode
;
4710 if (TREE_CODE (exp
) == CONSTRUCTOR
4711 && bitpos
% BITS_PER_UNIT
== 0
4712 /* If we have a nonzero bitpos for a register target, then we just
4713 let store_field do the bitfield handling. This is unlikely to
4714 generate unnecessary clear instructions anyways. */
4715 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4717 if (GET_CODE (target
) == MEM
)
4719 = adjust_address (target
,
4720 GET_MODE (target
) == BLKmode
4722 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4723 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4726 /* Update the alias set, if required. */
4727 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4728 && MEM_ALIAS_SET (target
) != 0)
4730 target
= copy_rtx (target
);
4731 set_mem_alias_set (target
, alias_set
);
4734 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
4737 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
4750 store_constructor (exp
, target
, cleared
, size
)
4756 tree type
= TREE_TYPE (exp
);
4757 #ifdef WORD_REGISTER_OPERATIONS
4758 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4761 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4762 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4766 /* We either clear the aggregate or indicate the value is dead. */
4767 if ((TREE_CODE (type
) == UNION_TYPE
4768 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4770 && ! CONSTRUCTOR_ELTS (exp
))
4771 /* If the constructor is empty, clear the union. */
4773 clear_storage (target
, expr_size (exp
));
4777 /* If we are building a static constructor into a register,
4778 set the initial value as zero so we can fold the value into
4779 a constant. But if more than one register is involved,
4780 this probably loses. */
4781 else if (! cleared
&& GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4782 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4784 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4788 /* If the constructor has fewer fields than the structure
4789 or if we are initializing the structure to mostly zeros,
4790 clear the whole structure first. Don't do this if TARGET is a
4791 register whose mode size isn't equal to SIZE since clear_storage
4792 can't handle this case. */
4793 else if (! cleared
&& size
> 0
4794 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4795 != fields_length (type
))
4796 || mostly_zeros_p (exp
))
4797 && (GET_CODE (target
) != REG
4798 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4801 clear_storage (target
, GEN_INT (size
));
4806 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4808 /* Store each element of the constructor into
4809 the corresponding field of TARGET. */
4811 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4813 tree field
= TREE_PURPOSE (elt
);
4814 tree value
= TREE_VALUE (elt
);
4815 enum machine_mode mode
;
4816 HOST_WIDE_INT bitsize
;
4817 HOST_WIDE_INT bitpos
= 0;
4819 rtx to_rtx
= target
;
4821 /* Just ignore missing fields.
4822 We cleared the whole structure, above,
4823 if any fields are missing. */
4827 if (cleared
&& is_zeros_p (value
))
4830 if (host_integerp (DECL_SIZE (field
), 1))
4831 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4835 mode
= DECL_MODE (field
);
4836 if (DECL_BIT_FIELD (field
))
4839 offset
= DECL_FIELD_OFFSET (field
);
4840 if (host_integerp (offset
, 0)
4841 && host_integerp (bit_position (field
), 0))
4843 bitpos
= int_bit_position (field
);
4847 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4853 if (contains_placeholder_p (offset
))
4854 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4855 offset
, make_tree (TREE_TYPE (exp
), target
));
4857 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4858 if (GET_CODE (to_rtx
) != MEM
)
4861 #ifdef POINTERS_EXTEND_UNSIGNED
4862 if (GET_MODE (offset_rtx
) != Pmode
)
4863 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4865 if (GET_MODE (offset_rtx
) != ptr_mode
)
4866 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4869 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4870 highest_pow2_factor (offset
));
4873 if (TREE_READONLY (field
))
4875 if (GET_CODE (to_rtx
) == MEM
)
4876 to_rtx
= copy_rtx (to_rtx
);
4878 RTX_UNCHANGING_P (to_rtx
) = 1;
4881 #ifdef WORD_REGISTER_OPERATIONS
4882 /* If this initializes a field that is smaller than a word, at the
4883 start of a word, try to widen it to a full word.
4884 This special case allows us to output C++ member function
4885 initializations in a form that the optimizers can understand. */
4886 if (GET_CODE (target
) == REG
4887 && bitsize
< BITS_PER_WORD
4888 && bitpos
% BITS_PER_WORD
== 0
4889 && GET_MODE_CLASS (mode
) == MODE_INT
4890 && TREE_CODE (value
) == INTEGER_CST
4892 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4894 tree type
= TREE_TYPE (value
);
4896 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4898 type
= (*lang_hooks
.types
.type_for_size
)
4899 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4900 value
= convert (type
, value
);
4903 if (BYTES_BIG_ENDIAN
)
4905 = fold (build (LSHIFT_EXPR
, type
, value
,
4906 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4907 bitsize
= BITS_PER_WORD
;
4912 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4913 && DECL_NONADDRESSABLE_P (field
))
4915 to_rtx
= copy_rtx (to_rtx
);
4916 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4919 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4920 value
, type
, cleared
,
4921 get_alias_set (TREE_TYPE (field
)));
4924 else if (TREE_CODE (type
) == ARRAY_TYPE
4925 || TREE_CODE (type
) == VECTOR_TYPE
)
4930 tree domain
= TYPE_DOMAIN (type
);
4931 tree elttype
= TREE_TYPE (type
);
4933 HOST_WIDE_INT minelt
= 0;
4934 HOST_WIDE_INT maxelt
= 0;
4936 /* Vectors are like arrays, but the domain is stored via an array
4938 if (TREE_CODE (type
) == VECTOR_TYPE
)
4940 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4941 the same field as TYPE_DOMAIN, we are not guaranteed that
4943 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
4944 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4947 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4948 && TYPE_MAX_VALUE (domain
)
4949 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4950 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4952 /* If we have constant bounds for the range of the type, get them. */
4955 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4956 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4959 /* If the constructor has fewer elements than the array,
4960 clear the whole array first. Similarly if this is
4961 static constructor of a non-BLKmode object. */
4962 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4966 HOST_WIDE_INT count
= 0, zero_count
= 0;
4967 need_to_clear
= ! const_bounds_p
;
4969 /* This loop is a more accurate version of the loop in
4970 mostly_zeros_p (it handles RANGE_EXPR in an index).
4971 It is also needed to check for missing elements. */
4972 for (elt
= CONSTRUCTOR_ELTS (exp
);
4973 elt
!= NULL_TREE
&& ! need_to_clear
;
4974 elt
= TREE_CHAIN (elt
))
4976 tree index
= TREE_PURPOSE (elt
);
4977 HOST_WIDE_INT this_node_count
;
4979 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4981 tree lo_index
= TREE_OPERAND (index
, 0);
4982 tree hi_index
= TREE_OPERAND (index
, 1);
4984 if (! host_integerp (lo_index
, 1)
4985 || ! host_integerp (hi_index
, 1))
4991 this_node_count
= (tree_low_cst (hi_index
, 1)
4992 - tree_low_cst (lo_index
, 1) + 1);
4995 this_node_count
= 1;
4997 count
+= this_node_count
;
4998 if (mostly_zeros_p (TREE_VALUE (elt
)))
4999 zero_count
+= this_node_count
;
5002 /* Clear the entire array first if there are any missing elements,
5003 or if the incidence of zero elements is >= 75%. */
5005 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
5009 if (need_to_clear
&& size
> 0)
5014 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5016 clear_storage (target
, GEN_INT (size
));
5020 else if (REG_P (target
))
5021 /* Inform later passes that the old value is dead. */
5022 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
5024 /* Store each element of the constructor into
5025 the corresponding element of TARGET, determined
5026 by counting the elements. */
5027 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
5029 elt
= TREE_CHAIN (elt
), i
++)
5031 enum machine_mode mode
;
5032 HOST_WIDE_INT bitsize
;
5033 HOST_WIDE_INT bitpos
;
5035 tree value
= TREE_VALUE (elt
);
5036 tree index
= TREE_PURPOSE (elt
);
5037 rtx xtarget
= target
;
5039 if (cleared
&& is_zeros_p (value
))
5042 unsignedp
= TREE_UNSIGNED (elttype
);
5043 mode
= TYPE_MODE (elttype
);
5044 if (mode
== BLKmode
)
5045 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
5046 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
5049 bitsize
= GET_MODE_BITSIZE (mode
);
5051 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
5053 tree lo_index
= TREE_OPERAND (index
, 0);
5054 tree hi_index
= TREE_OPERAND (index
, 1);
5055 rtx index_r
, pos_rtx
, loop_end
;
5056 struct nesting
*loop
;
5057 HOST_WIDE_INT lo
, hi
, count
;
5060 /* If the range is constant and "small", unroll the loop. */
5062 && host_integerp (lo_index
, 0)
5063 && host_integerp (hi_index
, 0)
5064 && (lo
= tree_low_cst (lo_index
, 0),
5065 hi
= tree_low_cst (hi_index
, 0),
5066 count
= hi
- lo
+ 1,
5067 (GET_CODE (target
) != MEM
5069 || (host_integerp (TYPE_SIZE (elttype
), 1)
5070 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
5073 lo
-= minelt
; hi
-= minelt
;
5074 for (; lo
<= hi
; lo
++)
5076 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
5078 if (GET_CODE (target
) == MEM
5079 && !MEM_KEEP_ALIAS_SET_P (target
)
5080 && TREE_CODE (type
) == ARRAY_TYPE
5081 && TYPE_NONALIASED_COMPONENT (type
))
5083 target
= copy_rtx (target
);
5084 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5087 store_constructor_field
5088 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
5089 get_alias_set (elttype
));
5094 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
5095 loop_end
= gen_label_rtx ();
5097 unsignedp
= TREE_UNSIGNED (domain
);
5099 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
5102 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
5104 SET_DECL_RTL (index
, index_r
);
5105 if (TREE_CODE (value
) == SAVE_EXPR
5106 && SAVE_EXPR_RTL (value
) == 0)
5108 /* Make sure value gets expanded once before the
5110 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
5113 store_expr (lo_index
, index_r
, 0);
5114 loop
= expand_start_loop (0);
5116 /* Assign value to element index. */
5118 = convert (ssizetype
,
5119 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5120 index
, TYPE_MIN_VALUE (domain
))));
5121 position
= size_binop (MULT_EXPR
, position
,
5123 TYPE_SIZE_UNIT (elttype
)));
5125 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
5126 xtarget
= offset_address (target
, pos_rtx
,
5127 highest_pow2_factor (position
));
5128 xtarget
= adjust_address (xtarget
, mode
, 0);
5129 if (TREE_CODE (value
) == CONSTRUCTOR
)
5130 store_constructor (value
, xtarget
, cleared
,
5131 bitsize
/ BITS_PER_UNIT
);
5133 store_expr (value
, xtarget
, 0);
5135 expand_exit_loop_if_false (loop
,
5136 build (LT_EXPR
, integer_type_node
,
5139 expand_increment (build (PREINCREMENT_EXPR
,
5141 index
, integer_one_node
), 0, 0);
5143 emit_label (loop_end
);
5146 else if ((index
!= 0 && ! host_integerp (index
, 0))
5147 || ! host_integerp (TYPE_SIZE (elttype
), 1))
5152 index
= ssize_int (1);
5155 index
= convert (ssizetype
,
5156 fold (build (MINUS_EXPR
, index
,
5157 TYPE_MIN_VALUE (domain
))));
5159 position
= size_binop (MULT_EXPR
, index
,
5161 TYPE_SIZE_UNIT (elttype
)));
5162 xtarget
= offset_address (target
,
5163 expand_expr (position
, 0, VOIDmode
, 0),
5164 highest_pow2_factor (position
));
5165 xtarget
= adjust_address (xtarget
, mode
, 0);
5166 store_expr (value
, xtarget
, 0);
5171 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
5172 * tree_low_cst (TYPE_SIZE (elttype
), 1));
5174 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
5176 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
5177 && TREE_CODE (type
) == ARRAY_TYPE
5178 && TYPE_NONALIASED_COMPONENT (type
))
5180 target
= copy_rtx (target
);
5181 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5184 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
5185 type
, cleared
, get_alias_set (elttype
));
5191 /* Set constructor assignments. */
5192 else if (TREE_CODE (type
) == SET_TYPE
)
5194 tree elt
= CONSTRUCTOR_ELTS (exp
);
5195 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
5196 tree domain
= TYPE_DOMAIN (type
);
5197 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the whole set (using bzero/memset), and
         then set the bits we want.  */
5209 /* Check for all zeros. */
5210 if (elt
== NULL_TREE
&& size
> 0)
5213 clear_storage (target
, GEN_INT (size
));
5217 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
5218 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
5219 bitlength
= size_binop (PLUS_EXPR
,
5220 size_diffop (domain_max
, domain_min
),
5223 nbits
= tree_low_cst (bitlength
, 1);
5225 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5226 are "complicated" (more than one range), initialize (the
5227 constant parts) by copying from a constant. */
5228 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5229 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5231 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5232 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5233 char *bit_buffer
= (char *) alloca (nbits
);
5234 HOST_WIDE_INT word
= 0;
5235 unsigned int bit_pos
= 0;
5236 unsigned int ibit
= 0;
5237 unsigned int offset
= 0; /* In bytes from beginning of set. */
5239 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5242 if (bit_buffer
[ibit
])
5244 if (BYTES_BIG_ENDIAN
)
5245 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5247 word
|= 1 << bit_pos
;
5251 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5253 if (word
!= 0 || ! cleared
)
5255 rtx datum
= GEN_INT (word
);
5258 /* The assumption here is that it is safe to use
5259 XEXP if the set is multi-word, but not if
5260 it's single-word. */
5261 if (GET_CODE (target
) == MEM
)
5262 to_rtx
= adjust_address (target
, mode
, offset
);
5263 else if (offset
== 0)
5267 emit_move_insn (to_rtx
, datum
);
5274 offset
+= set_word_size
/ BITS_PER_UNIT
;
5279 /* Don't bother clearing storage if the set is all ones. */
5280 if (TREE_CHAIN (elt
) != NULL_TREE
5281 || (TREE_PURPOSE (elt
) == NULL_TREE
5283 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5284 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5285 || (tree_low_cst (TREE_VALUE (elt
), 0)
5286 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5287 != (HOST_WIDE_INT
) nbits
))))
5288 clear_storage (target
, expr_size (exp
));
5290 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5292 /* Start of range of element or NULL. */
5293 tree startbit
= TREE_PURPOSE (elt
);
5294 /* End of range of element, or element value. */
5295 tree endbit
= TREE_VALUE (elt
);
5296 HOST_WIDE_INT startb
, endb
;
5297 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5299 bitlength_rtx
= expand_expr (bitlength
,
5300 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5302 /* Handle non-range tuple element like [ expr ]. */
5303 if (startbit
== NULL_TREE
)
5305 startbit
= save_expr (endbit
);
5309 startbit
= convert (sizetype
, startbit
);
5310 endbit
= convert (sizetype
, endbit
);
5311 if (! integer_zerop (domain_min
))
5313 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5314 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5316 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5317 EXPAND_CONST_ADDRESS
);
5318 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5319 EXPAND_CONST_ADDRESS
);
5325 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
5326 (GET_MODE (target
), 0),
5329 emit_move_insn (targetx
, target
);
5332 else if (GET_CODE (target
) == MEM
)
5337 /* Optimization: If startbit and endbit are constants divisible
5338 by BITS_PER_UNIT, call memset instead. */
5339 if (TARGET_MEM_FUNCTIONS
5340 && TREE_CODE (startbit
) == INTEGER_CST
5341 && TREE_CODE (endbit
) == INTEGER_CST
5342 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5343 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5345 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5347 plus_constant (XEXP (targetx
, 0),
5348 startb
/ BITS_PER_UNIT
),
5350 constm1_rtx
, TYPE_MODE (integer_type_node
),
5351 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5352 TYPE_MODE (sizetype
));
5355 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
5356 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
5357 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5358 startbit_rtx
, TYPE_MODE (sizetype
),
5359 endbit_rtx
, TYPE_MODE (sizetype
));
5362 emit_move_insn (target
, targetx
);
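/* Illustrative aside (not from the original sources): store_field, defined
   below, ultimately reduces a bit-field store to masking and shifting when
   the field lives inside a single word.  A stand-alone sketch of that
   reduction; it assumes 0 < bitsize < the number of bits in a long and a
   little-endian bit numbering, and is kept under #if 0 so it is never
   compiled.  */
#if 0
static unsigned long
store_bits_example (unsigned long word, unsigned long value,
                    int bitpos, int bitsize)
{
  unsigned long mask = ((1UL << bitsize) - 1) << bitpos;

  /* Clear the field, then insert the low BITSIZE bits of VALUE there.  */
  return (word & ~mask) | ((value << bitpos) & mask);
}
#endif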
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
5389 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
, unsignedp
, type
,
5392 HOST_WIDE_INT bitsize
;
5393 HOST_WIDE_INT bitpos
;
5394 enum machine_mode mode
;
5396 enum machine_mode value_mode
;
5401 HOST_WIDE_INT width_mask
= 0;
5403 if (TREE_CODE (exp
) == ERROR_MARK
)
5406 /* If we have nothing to store, do nothing unless the expression has
5409 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5410 else if (bitsize
>=0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5411 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5413 /* If we are storing into an unaligned field of an aligned union that is
5414 in a register, we may have the mode of TARGET being an integer mode but
5415 MODE == BLKmode. In that case, get an aligned object whose size and
5416 alignment are the same as TARGET and store TARGET into it (we can avoid
5417 the store if the field being stored is the entire width of TARGET). Then
5418 call ourselves recursively to store the field into a BLKmode version of
5419 that object. Finally, load from the object into TARGET. This is not
5420 very efficient in general, but should only be slightly more expensive
5421 than the otherwise-required unaligned accesses. Perhaps this can be
5422 cleaned up later. */
5425 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5429 (build_qualified_type (type
, TYPE_QUALS (type
) | TYPE_QUAL_CONST
),
5431 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5433 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5434 emit_move_insn (object
, target
);
5436 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5439 emit_move_insn (target
, object
);
5441 /* We want to return the BLKmode version of the data. */
5445 if (GET_CODE (target
) == CONCAT
)
5447 /* We're storing into a struct containing a single __complex. */
5451 return store_expr (exp
, target
, 0);
5454 /* If the structure is in a register or if the component
5455 is a bit field, we cannot use addressing to access it.
5456 Use bit-field techniques or SUBREG to store in it. */
5458 if (mode
== VOIDmode
5459 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5460 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5461 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5462 || GET_CODE (target
) == REG
5463 || GET_CODE (target
) == SUBREG
5464 /* If the field isn't aligned enough to store as an ordinary memref,
5465 store it as a bit field. */
5466 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
5467 && (MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
)
5468 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
5469 /* If the RHS and field are a constant size and the size of the
5470 RHS isn't the same size as the bitfield, we must use bitfield
5473 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5474 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5476 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5478 /* If BITSIZE is narrower than the size of the type of EXP
5479 we will be narrowing TEMP. Normally, what's wanted are the
5480 low-order bits. However, if EXP's type is a record and this is
5481 big-endian machine, we want the upper BITSIZE bits. */
5482 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5483 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5484 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5485 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5486 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5490 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5492 if (mode
!= VOIDmode
&& mode
!= BLKmode
5493 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5494 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5496 /* If the modes of TARGET and TEMP are both BLKmode, both
5497 must be in memory and BITPOS must be aligned on a byte
5498 boundary. If so, we simply do a block copy. */
5499 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5501 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5502 || bitpos
% BITS_PER_UNIT
!= 0)
5505 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5506 emit_block_move (target
, temp
,
5507 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5511 return value_mode
== VOIDmode
? const0_rtx
: target
;
5514 /* Store the value in the bitfield. */
5515 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5516 int_size_in_bytes (type
));
5518 if (value_mode
!= VOIDmode
)
5520 /* The caller wants an rtx for the value.
5521 If possible, avoid refetching from the bitfield itself. */
5523 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5526 enum machine_mode tmode
;
5528 tmode
= GET_MODE (temp
);
5529 if (tmode
== VOIDmode
)
5533 return expand_and (tmode
, temp
,
5534 gen_int_mode (width_mask
, tmode
),
5537 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5538 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5539 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5542 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5543 NULL_RTX
, value_mode
, VOIDmode
,
5544 int_size_in_bytes (type
));
5550 rtx addr
= XEXP (target
, 0);
5551 rtx to_rtx
= target
;
5553 /* If a value is wanted, it must be the lhs;
5554 so make the address stable for multiple use. */
5556 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5557 && ! CONSTANT_ADDRESS_P (addr
)
5558 /* A frame-pointer reference is already stable. */
5559 && ! (GET_CODE (addr
) == PLUS
5560 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5561 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5562 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5563 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5565 /* Now build a reference to just the desired component. */
5567 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5569 if (to_rtx
== target
)
5570 to_rtx
= copy_rtx (to_rtx
);
5572 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5573 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5574 set_mem_alias_set (to_rtx
, alias_set
);
5576 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
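/* Illustrative aside (not from the original sources): get_inner_reference,
   documented below, accumulates a bit position from nested component and
   array references.  The same accumulation for a fixed C object, using
   offsetof; the struct layout is hypothetical and 8-bit bytes are assumed.
   Kept under #if 0 so it is never compiled.  */
#if 0
#include <stddef.h>

struct inner_example { int a; short b; };
struct outer_example { char pad; struct inner_example v[4]; };

static unsigned long
bitpos_example (void)
{
  /* Offset of v, plus two whole elements, plus the offset of b, all
     converted to bits -- the analogue of *PBITPOS for outer.v[2].b.  */
  unsigned long byte_off = offsetof (struct outer_example, v)
                           + 2 * sizeof (struct inner_example)
                           + offsetof (struct inner_example, b);
  return byte_off * 8;
}
#endif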
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
5603 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5604 punsignedp
, pvolatilep
)
5606 HOST_WIDE_INT
*pbitsize
;
5607 HOST_WIDE_INT
*pbitpos
;
5609 enum machine_mode
*pmode
;
5614 enum machine_mode mode
= VOIDmode
;
5615 tree offset
= size_zero_node
;
5616 tree bit_offset
= bitsize_zero_node
;
5617 tree placeholder_ptr
= 0;
5620 /* First get the mode, signedness, and size. We do this from just the
5621 outermost expression. */
5622 if (TREE_CODE (exp
) == COMPONENT_REF
)
5624 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5625 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5626 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5628 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5630 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5632 size_tree
= TREE_OPERAND (exp
, 1);
5633 *punsignedp
= TREE_UNSIGNED (exp
);
5637 mode
= TYPE_MODE (TREE_TYPE (exp
));
5638 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5640 if (mode
== BLKmode
)
5641 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5643 *pbitsize
= GET_MODE_BITSIZE (mode
);
5648 if (! host_integerp (size_tree
, 1))
5649 mode
= BLKmode
, *pbitsize
= -1;
5651 *pbitsize
= tree_low_cst (size_tree
, 1);
5654 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5655 and find the ultimate containing object. */
5658 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5659 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5660 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5662 tree field
= TREE_OPERAND (exp
, 1);
5663 tree this_offset
= DECL_FIELD_OFFSET (field
);
5665 /* If this field hasn't been filled in yet, don't go
5666 past it. This should only happen when folding expressions
5667 made during type construction. */
5668 if (this_offset
== 0)
5670 else if (! TREE_CONSTANT (this_offset
)
5671 && contains_placeholder_p (this_offset
))
5672 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5674 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5675 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5676 DECL_FIELD_BIT_OFFSET (field
));
5678 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5681 else if (TREE_CODE (exp
) == ARRAY_REF
5682 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5684 tree index
= TREE_OPERAND (exp
, 1);
5685 tree array
= TREE_OPERAND (exp
, 0);
5686 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5687 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5688 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5690 /* We assume all arrays have sizes that are a multiple of a byte.
5691 First subtract the lower bound, if any, in the type of the
5692 index, then convert to sizetype and multiply by the size of the
5694 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5695 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5698 /* If the index has a self-referential type, pass it to a
5699 WITH_RECORD_EXPR; if the component size is, pass our
5700 component to one. */
5701 if (! TREE_CONSTANT (index
)
5702 && contains_placeholder_p (index
))
5703 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5704 if (! TREE_CONSTANT (unit_size
)
5705 && contains_placeholder_p (unit_size
))
5706 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5708 offset
= size_binop (PLUS_EXPR
, offset
,
5709 size_binop (MULT_EXPR
,
5710 convert (sizetype
, index
),
5714 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5716 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5718 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5719 We might have been called from tree optimization where we
5720 haven't set up an object yet. */
5728 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5729 && TREE_CODE (exp
) != VIEW_CONVERT_EXPR
5730 && ! ((TREE_CODE (exp
) == NOP_EXPR
5731 || TREE_CODE (exp
) == CONVERT_EXPR
)
5732 && (TYPE_MODE (TREE_TYPE (exp
))
5733 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5736 /* If any reference in the chain is volatile, the effect is volatile. */
5737 if (TREE_THIS_VOLATILE (exp
))
5740 exp
= TREE_OPERAND (exp
, 0);
5743 /* If OFFSET is constant, see if we can return the whole thing as a
5744 constant bit position. Otherwise, split it up. */
5745 if (host_integerp (offset
, 0)
5746 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5748 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5749 && host_integerp (tem
, 0))
5750 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5752 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5758 /* Return 1 if T is an expression that get_inner_reference handles. */
5761 handled_component_p (t
)
5764 switch (TREE_CODE (t
))
5769 case ARRAY_RANGE_REF
:
5770 case NON_LVALUE_EXPR
:
5771 case VIEW_CONVERT_EXPR
:
5776 return (TYPE_MODE (TREE_TYPE (t
))
5777 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
5792 force_operand (value
, target
)
5796 /* Use subtarget as the target for operand 0 of a binary operation. */
5797 rtx subtarget
= get_subtarget (target
);
5798 enum rtx_code code
= GET_CODE (value
);
5800 /* Check for a PIC address load. */
5801 if ((code
== PLUS
|| code
== MINUS
)
5802 && XEXP (value
, 0) == pic_offset_table_rtx
5803 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5804 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5805 || GET_CODE (XEXP (value
, 1)) == CONST
))
5808 subtarget
= gen_reg_rtx (GET_MODE (value
));
5809 emit_move_insn (subtarget
, value
);
5813 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5816 target
= gen_reg_rtx (GET_MODE (value
));
5817 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5818 code
== ZERO_EXTEND
);
5822 if (GET_RTX_CLASS (code
) == '2' || GET_RTX_CLASS (code
) == 'c')
5824 op2
= XEXP (value
, 1);
5825 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5827 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5830 op2
= negate_rtx (GET_MODE (value
), op2
);
5833 /* Check for an addition with OP2 a constant integer and our first
5834 operand a PLUS of a virtual register and something else. In that
5835 case, we want to emit the sum of the virtual register and the
5836 constant first and then add the other value. This allows virtual
5837 register instantiation to simply modify the constant rather than
5838 creating another one around this addition. */
5839 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5840 && GET_CODE (XEXP (value
, 0)) == PLUS
5841 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5842 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5843 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5845 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5846 XEXP (XEXP (value
, 0), 0), op2
,
5847 subtarget
, 0, OPTAB_LIB_WIDEN
);
5848 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5849 force_operand (XEXP (XEXP (value
,
5851 target
, 0, OPTAB_LIB_WIDEN
);
5854 op1
= force_operand (XEXP (value
, 0), subtarget
);
5855 op2
= force_operand (op2
, NULL_RTX
);
5859 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5861 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5862 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5863 target
, 1, OPTAB_LIB_WIDEN
);
5865 return expand_divmod (0,
5866 FLOAT_MODE_P (GET_MODE (value
))
5867 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5868 GET_MODE (value
), op1
, op2
, target
, 0);
5871 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5875 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5879 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5883 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5884 target
, 0, OPTAB_LIB_WIDEN
);
5887 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5888 target
, 1, OPTAB_LIB_WIDEN
);
5891 if (GET_RTX_CLASS (code
) == '1')
5893 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5894 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5897 #ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
5900 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5901 && (GET_MODE_SIZE (GET_MODE (value
))
5902 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5904 = simplify_gen_subreg (GET_MODE (value
),
5905 force_reg (GET_MODE (SUBREG_REG (value
)),
5906 force_operand (SUBREG_REG (value
),
5908 GET_MODE (SUBREG_REG (value
)),
5909 SUBREG_BYTE (value
));
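/* Illustrative aside (not from the original sources): safe_from_p, defined
   below, marks each SAVE_EXPR it visits with TREE_PRIVATE so that subtrees
   shared many times are examined only once, and clears the marks when the
   top-level call returns.  The same visited-flag idea on a plain DAG; the
   types and the fixed-size bookkeeping array are hypothetical.  Kept under
   #if 0 so it is never compiled.  */
#if 0
struct dag_node { int visited; int nkids; struct dag_node *kid[2]; };

static int
scan_1 (struct dag_node *n, struct dag_node **seen, int *nseen)
{
  int i;

  if (n->visited)
    return 1;                   /* already examined via another path */
  n->visited = 1;
  seen[(*nseen)++] = n;

  for (i = 0; i < n->nkids; i++)
    if (! scan_1 (n->kid[i], seen, nseen))
      return 0;
  return 1;
}

static int
scan_top (struct dag_node *n)
{
  struct dag_node *seen[256];   /* bounded only for the sketch */
  int nseen = 0, i;
  int ok = scan_1 (n, seen, &nseen);

  /* Analogous to clearing TREE_PRIVATE on the recorded SAVE_EXPRs.  */
  for (i = 0; i < nseen; i++)
    seen[i]->visited = 0;
  return ok;
}
#endif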
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
5924 safe_from_p (x
, exp
, top_p
)
5931 static tree save_expr_list
;
5934 /* If EXP has varying size, we MUST use a target since we currently
5935 have no way of allocating temporaries of variable size
5936 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5937 So we assume here that something at a higher level has prevented a
5938 clash. This is somewhat bogus, but the best we can do. Only
5939 do this when X is BLKmode and when we are at the top level. */
5940 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5941 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5942 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5943 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5944 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5946 && GET_MODE (x
) == BLKmode
)
5947 /* If X is in the outgoing argument area, it is always safe. */
5948 || (GET_CODE (x
) == MEM
5949 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5950 || (GET_CODE (XEXP (x
, 0)) == PLUS
5951 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5954 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5955 find the underlying pseudo. */
5956 if (GET_CODE (x
) == SUBREG
)
5959 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */
5979 rtn
= safe_from_p (x
, exp
, 0);
5981 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5982 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5987 /* Now look at our tree code and possibly recurse. */
5988 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5991 exp_rtl
= DECL_RTL_IF_SET (exp
);
5998 if (TREE_CODE (exp
) == TREE_LIST
)
5999 return ((TREE_VALUE (exp
) == 0
6000 || safe_from_p (x
, TREE_VALUE (exp
), 0))
6001 && (TREE_CHAIN (exp
) == 0
6002 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
6003 else if (TREE_CODE (exp
) == ERROR_MARK
)
6004 return 1; /* An already-visited SAVE_EXPR? */
6009 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
6013 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
6014 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
6018 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6019 the expression. If it is set, we conflict iff we are that rtx or
6020 both are in memory. Otherwise, we check all operands of the
6021 expression recursively. */
6023 switch (TREE_CODE (exp
))
6026 /* If the operand is static or we are static, we can't conflict.
6027 Likewise if we don't conflict with the operand at all. */
6028 if (staticp (TREE_OPERAND (exp
, 0))
6029 || TREE_STATIC (exp
)
6030 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
6033 /* Otherwise, the only way this can conflict is if we are taking
6034 the address of a DECL a that address if part of X, which is
6036 exp
= TREE_OPERAND (exp
, 0);
6039 if (!DECL_RTL_SET_P (exp
)
6040 || GET_CODE (DECL_RTL (exp
)) != MEM
)
6043 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
6048 if (GET_CODE (x
) == MEM
6049 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
6050 get_alias_set (exp
)))
6055 /* Assume that the call will clobber all hard registers and
6057 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6058 || GET_CODE (x
) == MEM
)
6063 /* If a sequence exists, we would have to scan every instruction
6064 in the sequence to see if it was safe. This is probably not
6066 if (RTL_EXPR_SEQUENCE (exp
))
6069 exp_rtl
= RTL_EXPR_RTL (exp
);
6072 case WITH_CLEANUP_EXPR
:
6073 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
6076 case CLEANUP_POINT_EXPR
:
6077 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
6080 exp_rtl
= SAVE_EXPR_RTL (exp
);
6084 /* If we've already scanned this, don't do it again. Otherwise,
6085 show we've scanned it and record for clearing the flag if we're
6087 if (TREE_PRIVATE (exp
))
6090 TREE_PRIVATE (exp
) = 1;
6091 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
6093 TREE_PRIVATE (exp
) = 0;
6097 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
6101 /* The only operand we look at is operand 1. The rest aren't
6102 part of the expression. */
6103 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
6105 case METHOD_CALL_EXPR
:
6106 /* This takes an rtx argument, but shouldn't appear here. */
6113 /* If we have an rtx, we do not need to scan our operands. */
6117 nops
= first_rtl_op (TREE_CODE (exp
));
6118 for (i
= 0; i
< nops
; i
++)
6119 if (TREE_OPERAND (exp
, i
) != 0
6120 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
6123 /* If this is a language-specific tree code, it may require
6124 special handling. */
6125 if ((unsigned int) TREE_CODE (exp
)
6126 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6127 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
6131 /* If we have an rtl, find any enclosed object. Then see if we conflict
6135 if (GET_CODE (exp_rtl
) == SUBREG
)
6137 exp_rtl
= SUBREG_REG (exp_rtl
);
6138 if (GET_CODE (exp_rtl
) == REG
6139 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
6143 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6144 are memory and they conflict. */
6145 return ! (rtx_equal_p (x
, exp_rtl
)
6146 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
6147 && true_dependence (exp_rtl
, VOIDmode
, x
,
6148 rtx_addr_varies_p
)));
6151 /* If we reach here, it is safe. */
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */
6163 switch (TREE_CODE (exp
))
6167 return DECL_RTL (exp
);
6173 #ifdef MAX_INTEGER_COMPUTATION_MODE
6176 check_max_integer_computation_mode (exp
)
6179 enum tree_code code
;
6180 enum machine_mode mode
;
6182 /* Strip any NOPs that don't change the mode. */
6184 code
= TREE_CODE (exp
);
6186 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6187 if (code
== NOP_EXPR
6188 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
6191 /* First check the type of the overall operation. We need only look at
6192 unary, binary and relational operations. */
6193 if (TREE_CODE_CLASS (code
) == '1'
6194 || TREE_CODE_CLASS (code
) == '2'
6195 || TREE_CODE_CLASS (code
) == '<')
6197 mode
= TYPE_MODE (TREE_TYPE (exp
));
6198 if (GET_MODE_CLASS (mode
) == MODE_INT
6199 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6200 internal_error ("unsupported wide integer operation");
6203 /* Check operand of a unary op. */
6204 if (TREE_CODE_CLASS (code
) == '1')
6206 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6207 if (GET_MODE_CLASS (mode
) == MODE_INT
6208 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6209 internal_error ("unsupported wide integer operation");
6212 /* Check operands of a binary/comparison op. */
6213 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
6215 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6216 if (GET_MODE_CLASS (mode
) == MODE_INT
6217 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6218 internal_error ("unsupported wide integer operation");
6220 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
6221 if (GET_MODE_CLASS (mode
) == MODE_INT
6222 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6223 internal_error ("unsupported wide integer operation");
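/* Illustrative aside (not from the original sources): for a nonzero integer,
   the largest power of two dividing it is its lowest set bit, which C & -C
   extracts.  That is the INTEGER_CST case of highest_pow2_factor, defined
   below, in miniature; ~0UL stands in for BIGGEST_ALIGNMENT here.  Kept
   under #if 0 so it is never compiled.  */
#if 0
static unsigned long
lowest_set_bit_example (unsigned long c)
{
  return c ? (c & -c) : ~0UL;
}
#endif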
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */
6231 static HOST_WIDE_INT
6232 highest_pow2_factor (exp
)
6235 HOST_WIDE_INT c0
, c1
;
6237 switch (TREE_CODE (exp
))
6240 /* We can find the lowest bit that's a one. If the low
6241 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6242 We need to handle this case since we can find it in a COND_EXPR,
6243 a MIN_EXPR, or a MAX_EXPR. If the constant overlows, we have an
6244 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6246 if (TREE_CONSTANT_OVERFLOW (exp
))
6247 return BIGGEST_ALIGNMENT
;
6250 /* Note: tree_low_cst is intentionally not used here,
6251 we don't care about the upper bits. */
6252 c0
= TREE_INT_CST_LOW (exp
);
6254 return c0
? c0
: BIGGEST_ALIGNMENT
;
6258 case PLUS_EXPR
: case MINUS_EXPR
: case MIN_EXPR
: case MAX_EXPR
:
6259 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6260 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6261 return MIN (c0
, c1
);
6264 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6265 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6268 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6270 if (integer_pow2p (TREE_OPERAND (exp
, 1))
6271 && host_integerp (TREE_OPERAND (exp
, 1), 1))
6273 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6274 c1
= tree_low_cst (TREE_OPERAND (exp
, 1), 1);
6275 return MAX (1, c0
/ c1
);
6279 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6280 case SAVE_EXPR
: case WITH_RECORD_EXPR
:
6281 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6284 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
6287 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6288 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6289 return MIN (c0
, c1
);
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
  HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
	       || TREE_CODE (elt) == COND_EXPR)
	      ? TREE_OPERAND (elt, 1)
	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
	      ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
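/* Editorial note: a minimal usage sketch, not from the GCC sources; only
   expand_expr, emit_move_insn and the EXPAND_* modifiers are real, the
   surrounding context is assumed.  It illustrates the contract above:
   TARGET and TMODE are only hints, so callers must use the returned rtx.  */
#if 0
  {
    rtx result;

    /* Let the expander pick a natural mode and destination.  */
    result = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

    /* Even when TARGET is suggested, the value may come back elsewhere;
       copy it if it really must end up in TARGET.  */
    result = expand_expr (exp, target, GET_MODE (target), EXPAND_NORMAL);
    if (result != target)
      emit_move_insn (target, result);
  }
#endif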
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      else
	return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp)))
    target = 0;
6563 tree function
= decl_function_context (exp
);
6564 /* Handle using a label in a containing function. */
6565 if (function
!= current_function_decl
6566 && function
!= inline_function_decl
&& function
!= 0)
6568 struct function
*p
= find_function_data (function
);
6569 p
->expr
->x_forced_labels
6570 = gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (exp
),
6571 p
->expr
->x_forced_labels
);
6575 if (modifier
== EXPAND_INITIALIZER
)
6576 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
,
6581 temp
= gen_rtx_MEM (FUNCTION_MODE
,
6582 gen_rtx_LABEL_REF (Pmode
, label_rtx (exp
)));
6583 if (function
!= current_function_decl
6584 && function
!= inline_function_decl
&& function
!= 0)
6585 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
6590 if (!DECL_RTL_SET_P (exp
))
6592 error_with_decl (exp
, "prior parameter's size depends on `%s'");
6593 return CONST0_RTX (mode
);
6596 /* ... fall through ... */
6599 /* If a static var's type was incomplete when the decl was written,
6600 but the type is complete now, lay out the decl now. */
6601 if (DECL_SIZE (exp
) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
6602 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6604 rtx value
= DECL_RTL_IF_SET (exp
);
6606 layout_decl (exp
, 0);
6608 /* If the RTL was already set, update its mode and memory
6612 PUT_MODE (value
, DECL_MODE (exp
));
6613 SET_DECL_RTL (exp
, 0);
6614 set_mem_attributes (value
, exp
, 1);
6615 SET_DECL_RTL (exp
, value
);
6619 /* ... fall through ... */
6623 if (DECL_RTL (exp
) == 0)
6626 /* Ensure variable marked as used even if it doesn't go through
6627 a parser. If it hasn't be used yet, write out an external
6629 if (! TREE_USED (exp
))
6631 assemble_external (exp
);
6632 TREE_USED (exp
) = 1;
6635 /* Show we haven't gotten RTL for this yet. */
6638 /* Handle variables inherited from containing functions. */
6639 context
= decl_function_context (exp
);
6641 /* We treat inline_function_decl as an alias for the current function
6642 because that is the inline function whose vars, types, etc.
6643 are being merged into the current function.
6644 See expand_inline_function. */
6646 if (context
!= 0 && context
!= current_function_decl
6647 && context
!= inline_function_decl
6648 /* If var is static, we don't need a static chain to access it. */
6649 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6650 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6654 /* Mark as non-local and addressable. */
6655 DECL_NONLOCAL (exp
) = 1;
6656 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6658 (*lang_hooks
.mark_addressable
) (exp
);
6659 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6661 addr
= XEXP (DECL_RTL (exp
), 0);
6662 if (GET_CODE (addr
) == MEM
)
6664 = replace_equiv_address (addr
,
6665 fix_lexical_addr (XEXP (addr
, 0), exp
));
6667 addr
= fix_lexical_addr (addr
, exp
);
6669 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6672 /* This is the case of an array whose size is to be determined
6673 from its initializer, while the initializer is still being parsed.
6676 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6677 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6678 temp
= validize_mem (DECL_RTL (exp
));
6680 /* If DECL_RTL is memory, we are in the normal case and either
6681 the address is not valid or it is not a register and -fforce-addr
6682 is specified, get the address into a register. */
6684 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6685 && modifier
!= EXPAND_CONST_ADDRESS
6686 && modifier
!= EXPAND_SUM
6687 && modifier
!= EXPAND_INITIALIZER
6688 && (! memory_address_p (DECL_MODE (exp
),
6689 XEXP (DECL_RTL (exp
), 0))
6691 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6692 temp
= replace_equiv_address (DECL_RTL (exp
),
6693 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6695 /* If we got something, return it. But first, set the alignment
6696 if the address is a register. */
6699 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6700 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6705 /* If the mode of DECL_RTL does not match that of the decl, it
6706 must be a promoted value. We return a SUBREG of the wanted mode,
6707 but mark it so that we know that it was already extended. */
6709 if (GET_CODE (DECL_RTL (exp
)) == REG
6710 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6712 /* Get the signedness used for this variable. Ensure we get the
6713 same mode we got when the variable was declared. */
6714 if (GET_MODE (DECL_RTL (exp
))
6715 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6716 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6719 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6720 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6721 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6725 return DECL_RTL (exp
);
6728 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6729 TREE_INT_CST_HIGH (exp
), mode
);
6731 /* ??? If overflow is set, fold will have done an incomplete job,
6732 which can result in (plus xx (const_int 0)), which can get
6733 simplified by validate_replace_rtx during virtual register
6734 instantiation, which can result in unrecognizable insns.
6735 Avoid this by forcing all overflows into registers. */
6736 if (TREE_CONSTANT_OVERFLOW (exp
)
6737 && modifier
!= EXPAND_INITIALIZER
)
6738 temp
= force_reg (mode
, temp
);
6743 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
6761 if (! TREE_CST_RTL (exp
))
6762 output_constant_def (exp
, 1);
6764 /* TREE_CST_RTL probably contains a constant address.
6765 On RISC machines where a constant address isn't valid,
6766 make some insns to get that address into a register. */
6767 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
6768 && modifier
!= EXPAND_CONST_ADDRESS
6769 && modifier
!= EXPAND_INITIALIZER
6770 && modifier
!= EXPAND_SUM
6771 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
6773 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
6774 return replace_equiv_address (TREE_CST_RTL (exp
),
6775 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
6776 return TREE_CST_RTL (exp
);
6778 case EXPR_WITH_FILE_LOCATION
:
6781 const char *saved_input_filename
= input_filename
;
6782 int saved_lineno
= lineno
;
6783 input_filename
= EXPR_WFL_FILENAME (exp
);
6784 lineno
= EXPR_WFL_LINENO (exp
);
6785 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6786 emit_line_note (input_filename
, lineno
);
6787 /* Possibly avoid switching back and forth here. */
6788 to_return
= expand_expr (EXPR_WFL_NODE (exp
), target
, tmode
, modifier
);
6789 input_filename
= saved_input_filename
;
6790 lineno
= saved_lineno
;
6795 context
= decl_function_context (exp
);
6797 /* If this SAVE_EXPR was at global context, assume we are an
6798 initialization function and move it into our context. */
6800 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6802 /* We treat inline_function_decl as an alias for the current function
6803 because that is the inline function whose vars, types, etc.
6804 are being merged into the current function.
6805 See expand_inline_function. */
6806 if (context
== current_function_decl
|| context
== inline_function_decl
)
6809 /* If this is non-local, handle it. */
6812 /* The following call just exists to abort if the context is
6813 not of a containing function. */
6814 find_function_data (context
);
6816 temp
= SAVE_EXPR_RTL (exp
);
6817 if (temp
&& GET_CODE (temp
) == REG
)
6819 put_var_into_stack (exp
);
6820 temp
= SAVE_EXPR_RTL (exp
);
6822 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6825 replace_equiv_address (temp
,
6826 fix_lexical_addr (XEXP (temp
, 0), exp
));
6828 if (SAVE_EXPR_RTL (exp
) == 0)
6830 if (mode
== VOIDmode
)
6833 temp
= assign_temp (build_qualified_type (type
,
6835 | TYPE_QUAL_CONST
)),
6838 SAVE_EXPR_RTL (exp
) = temp
;
6839 if (!optimize
&& GET_CODE (temp
) == REG
)
6840 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6843 /* If the mode of TEMP does not match that of the expression, it
6844 must be a promoted value. We pass store_expr a SUBREG of the
6845 wanted mode but mark it so that we know that it was already
6846 extended. Note that `unsignedp' was modified above in
6849 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6851 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6852 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6853 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6856 if (temp
== const0_rtx
)
6857 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6859 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
6861 TREE_USED (exp
) = 1;
6864 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6865 must be a promoted value. We return a SUBREG of the wanted mode,
6866 but mark it so that we know that it was already extended. */
6868 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6869 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6871 /* Compute the signedness and make the proper SUBREG. */
6872 promote_mode (type
, mode
, &unsignedp
, 0);
6873 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6874 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6875 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6879 return SAVE_EXPR_RTL (exp
);
6884 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6885 TREE_OPERAND (exp
, 0)
6886 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
6890 case PLACEHOLDER_EXPR
:
6892 tree old_list
= placeholder_list
;
6893 tree placeholder_expr
= 0;
6895 exp
= find_placeholder (exp
, &placeholder_expr
);
6899 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6900 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
6901 placeholder_list
= old_list
;
6905 case WITH_RECORD_EXPR
:
6906 /* Put the object on the placeholder list, expand our first operand,
6907 and pop the list. */
6908 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6910 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
6912 placeholder_list
= TREE_CHAIN (placeholder_list
);
6916 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6917 expand_goto (TREE_OPERAND (exp
, 0));
6919 expand_computed_goto (TREE_OPERAND (exp
, 0));
6923 expand_exit_loop_if_false (NULL
,
6924 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6927 case LABELED_BLOCK_EXPR
:
6928 if (LABELED_BLOCK_BODY (exp
))
6929 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6930 /* Should perhaps use expand_label, but this is simpler and safer. */
6931 do_pending_stack_adjust ();
6932 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6935 case EXIT_BLOCK_EXPR
:
6936 if (EXIT_BLOCK_RETURN (exp
))
6937 sorry ("returned value in block_exit_expr");
6938 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6943 expand_start_loop (1);
6944 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
6952 tree vars
= TREE_OPERAND (exp
, 0);
6954 /* Need to open a binding contour here because
6955 if there are any cleanups they must be contained here. */
6956 expand_start_bindings (2);
6958 /* Mark the corresponding BLOCK for output in its proper place. */
6959 if (TREE_OPERAND (exp
, 2) != 0
6960 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6961 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
6963 /* If VARS have not yet been expanded, expand them now. */
6966 if (!DECL_RTL_SET_P (vars
))
6968 expand_decl_init (vars
);
6969 vars
= TREE_CHAIN (vars
);
6972 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
6974 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6980 if (RTL_EXPR_SEQUENCE (exp
))
6982 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6984 emit_insn (RTL_EXPR_SEQUENCE (exp
));
6985 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6987 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6988 free_temps_for_rtl_expr (exp
);
6989 return RTL_EXPR_RTL (exp
);
6992 /* If we don't need the result, just ensure we evaluate any
6998 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6999 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
7017 else if ((TREE_STATIC (exp
)
7018 && ((mode
== BLKmode
7019 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
7020 || TREE_ADDRESSABLE (exp
)
7021 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
7022 && (! MOVE_BY_PIECES_P
7023 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
7025 && ((TREE_CODE (type
) == VECTOR_TYPE
7026 && !is_zeros_p (exp
))
7027 || ! mostly_zeros_p (exp
)))))
7028 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
7030 rtx constructor
= output_constant_def (exp
, 1);
7032 if (modifier
!= EXPAND_CONST_ADDRESS
7033 && modifier
!= EXPAND_INITIALIZER
7034 && modifier
!= EXPAND_SUM
)
7035 constructor
= validize_mem (constructor
);
7041 /* Handle calls that pass values in multiple non-contiguous
7042 locations. The Irix 6 ABI has examples of this. */
7043 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
7044 || GET_CODE (target
) == PARALLEL
)
7046 = assign_temp (build_qualified_type (type
,
7048 | (TREE_READONLY (exp
)
7049 * TYPE_QUAL_CONST
))),
7050 0, TREE_ADDRESSABLE (exp
), 1);
7052 store_constructor (exp
, target
, 0, int_expr_size (exp
));
7058 tree exp1
= TREE_OPERAND (exp
, 0);
7060 tree string
= string_constant (exp1
, &index
);
7062 /* Try to optimize reads from const strings. */
7064 && TREE_CODE (string
) == STRING_CST
7065 && TREE_CODE (index
) == INTEGER_CST
7066 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
7067 && GET_MODE_CLASS (mode
) == MODE_INT
7068 && GET_MODE_SIZE (mode
) == 1
7069 && modifier
!= EXPAND_WRITE
)
7070 return gen_int_mode (TREE_STRING_POINTER (string
)
7071 [TREE_INT_CST_LOW (index
)], mode
);
7073 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
7074 op0
= memory_address (mode
, op0
);
7075 temp
= gen_rtx_MEM (mode
, op0
);
7076 set_mem_attributes (temp
, exp
, 0);
7078 /* If we are writing to this object and its type is a record with
7079 readonly fields, we must mark it as readonly so it will
7080 conflict with readonly references to those fields. */
7081 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
7082 RTX_UNCHANGING_P (temp
) = 1;
7088 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
7092 tree array
= TREE_OPERAND (exp
, 0);
7093 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
7094 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
7095 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */
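	/* Editorial note: a tiny stand-alone illustration (not GCC code) of
	   the fold described above; a constant index into a string literal
	   is just a character constant, so the expander can return it
	   directly instead of materializing the string in memory.  */
#if 0
#include <assert.h>

int
main (void)
{
  assert ("foo"[2] == 'o');	/* what the returned CONST_INT represents */
  return 0;
}
#endif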
7114 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
7115 && TREE_CODE (array
) == STRING_CST
7116 && TREE_CODE (index
) == INTEGER_CST
7117 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
7118 && GET_MODE_CLASS (mode
) == MODE_INT
7119 && GET_MODE_SIZE (mode
) == 1)
7120 return gen_int_mode (TREE_STRING_POINTER (array
)
7121 [TREE_INT_CST_LOW (index
)], mode
);
7123 /* If this is a constant index into a constant array,
7124 just get the value from the array. Handle both the cases when
7125 we have an explicit constructor and when our operand is a variable
7126 that was declared const. */
7128 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
7129 && TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
7130 && TREE_CODE (index
) == INTEGER_CST
7131 && 0 > compare_tree_int (index
,
7132 list_length (CONSTRUCTOR_ELTS
7133 (TREE_OPERAND (exp
, 0)))))
7137 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
7138 i
= TREE_INT_CST_LOW (index
);
7139 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
7143 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
7147 else if (optimize
>= 1
7148 && modifier
!= EXPAND_CONST_ADDRESS
7149 && modifier
!= EXPAND_INITIALIZER
7150 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
7151 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
7152 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
7154 if (TREE_CODE (index
) == INTEGER_CST
)
7156 tree init
= DECL_INITIAL (array
);
7158 if (TREE_CODE (init
) == CONSTRUCTOR
)
7162 for (elem
= CONSTRUCTOR_ELTS (init
);
7164 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
7165 elem
= TREE_CHAIN (elem
))
7168 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
7169 return expand_expr (fold (TREE_VALUE (elem
)), target
,
7172 else if (TREE_CODE (init
) == STRING_CST
7173 && 0 > compare_tree_int (index
,
7174 TREE_STRING_LENGTH (init
)))
7176 tree type
= TREE_TYPE (TREE_TYPE (init
));
7177 enum machine_mode mode
= TYPE_MODE (type
);
7179 if (GET_MODE_CLASS (mode
) == MODE_INT
7180 && GET_MODE_SIZE (mode
) == 1)
7181 return gen_int_mode (TREE_STRING_POINTER (init
)
7182 [TREE_INT_CST_LOW (index
)], mode
);
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
7196 if (code
== COMPONENT_REF
7197 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
7198 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
7202 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
7203 elt
= TREE_CHAIN (elt
))
7204 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
	      /* We can normally use the value of the field in the
		 CONSTRUCTOR.  However, if this is a bitfield in
		 an integral mode that we can fit in a HOST_WIDE_INT,
		 we must mask only the number of bits in the bitfield,
		 since this is done implicitly by the constructor.  If
		 the bitfield does not meet either of those conditions,
		 we can't do this optimization.  */
7212 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7213 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
7215 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
7216 <= HOST_BITS_PER_WIDE_INT
))))
7218 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
7219 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
7221 HOST_WIDE_INT bitsize
7222 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
7223 enum machine_mode imode
7224 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
7226 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
7228 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7229 op0
= expand_and (imode
, op0
, op1
, target
);
7234 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
7237 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7239 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7249 enum machine_mode mode1
;
7250 HOST_WIDE_INT bitsize
, bitpos
;
7253 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7254 &mode1
, &unsignedp
, &volatilep
);
7257 /* If we got back the original object, something is wrong. Perhaps
7258 we are evaluating an expression too early. In any event, don't
7259 infinitely recurse. */
7263 /* If TEM's type is a union of variable size, pass TARGET to the inner
7264 computation, since it will need a temporary and TARGET is known
7265 to have to do. This occurs in unchecked conversion in Ada. */
7269 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7270 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7272 ? target
: NULL_RTX
),
7274 (modifier
== EXPAND_INITIALIZER
7275 || modifier
== EXPAND_CONST_ADDRESS
)
7276 ? modifier
: EXPAND_NORMAL
);
7278 /* If this is a constant, put it into a register if it is a
7279 legitimate constant and OFFSET is 0 and memory if it isn't. */
7280 if (CONSTANT_P (op0
))
7282 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7283 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7285 op0
= force_reg (mode
, op0
);
7287 op0
= validize_mem (force_const_mem (mode
, op0
));
7292 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
7294 /* If this object is in a register, put it into memory.
7295 This case can't occur in C, but can in Ada if we have
7296 unchecked conversion of an expression from a scalar type to
7297 an array or record type. */
7298 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7299 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
7301 /* If the operand is a SAVE_EXPR, we can deal with this by
7302 forcing the SAVE_EXPR into memory. */
7303 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7305 put_var_into_stack (TREE_OPERAND (exp
, 0));
7306 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7311 = build_qualified_type (TREE_TYPE (tem
),
7312 (TYPE_QUALS (TREE_TYPE (tem
))
7313 | TYPE_QUAL_CONST
));
7314 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7316 emit_move_insn (memloc
, op0
);
7321 if (GET_CODE (op0
) != MEM
)
7324 #ifdef POINTERS_EXTEND_UNSIGNED
7325 if (GET_MODE (offset_rtx
) != Pmode
)
7326 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
7328 if (GET_MODE (offset_rtx
) != ptr_mode
)
7329 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7332 /* A constant address in OP0 can have VOIDmode, we must not try
7333 to call force_reg for that case. Avoid that case. */
7334 if (GET_CODE (op0
) == MEM
7335 && GET_MODE (op0
) == BLKmode
7336 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7338 && (bitpos
% bitsize
) == 0
7339 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7340 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7342 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7346 op0
= offset_address (op0
, offset_rtx
,
7347 highest_pow2_factor (offset
));
7350 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7351 record its alignment as BIGGEST_ALIGNMENT. */
7352 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7353 && is_aligning_offset (offset
, tem
))
7354 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7356 /* Don't forget about volatility even if this is a bitfield. */
7357 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7359 if (op0
== orig_op0
)
7360 op0
= copy_rtx (op0
);
7362 MEM_VOLATILE_P (op0
) = 1;
7365 /* The following code doesn't handle CONCAT.
7366 Assume only bitpos == 0 can be used for CONCAT, due to
7367 one element arrays having the same mode as its element. */
7368 if (GET_CODE (op0
) == CONCAT
)
7370 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
      /* In cases where an aligned union has an unaligned object
	 as a field, we might be extracting a BLKmode value from
	 an integer-mode (e.g., SImode) object.  Handle this case
	 by doing the extract into an object as wide as the field
	 (which we know to be the width of a basic mode), then
	 storing into memory, and changing the mode to BLKmode.  */
7381 if (mode1
== VOIDmode
7382 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7383 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7384 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7385 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7386 && modifier
!= EXPAND_CONST_ADDRESS
7387 && modifier
!= EXPAND_INITIALIZER
)
7388 /* If the field isn't aligned enough to fetch as a memref,
7389 fetch it as a bit field. */
7390 || (mode1
!= BLKmode
7391 && SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))
7392 && ((TYPE_ALIGN (TREE_TYPE (tem
))
7393 < GET_MODE_ALIGNMENT (mode
))
7394 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
7395 /* If the type and the field are a constant size and the
7396 size of the type isn't the same size as the bitfield,
7397 we must use bitfield operations. */
7399 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7401 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7404 enum machine_mode ext_mode
= mode
;
7406 if (ext_mode
== BLKmode
7407 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7408 && GET_CODE (target
) == MEM
7409 && bitpos
% BITS_PER_UNIT
== 0))
7410 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7412 if (ext_mode
== BLKmode
)
7414 /* In this case, BITPOS must start at a byte boundary and
7415 TARGET, if specified, must be a MEM. */
7416 if (GET_CODE (op0
) != MEM
7417 || (target
!= 0 && GET_CODE (target
) != MEM
)
7418 || bitpos
% BITS_PER_UNIT
!= 0)
7421 op0
= adjust_address (op0
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
7423 target
= assign_temp (type
, 0, 1, 1);
7425 emit_block_move (target
, op0
,
7426 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7433 op0
= validize_mem (op0
);
7435 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7436 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7438 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
7439 unsignedp
, target
, ext_mode
, ext_mode
,
7440 int_size_in_bytes (TREE_TYPE (tem
)));
7442 /* If the result is a record type and BITSIZE is narrower than
7443 the mode of OP0, an integral mode, and this is a big endian
7444 machine, we must put the field into the high-order bits. */
7445 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7446 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7447 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7448 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7449 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7453 if (mode
== BLKmode
)
7455 rtx
new = assign_temp (build_qualified_type
7456 ((*lang_hooks
.types
.type_for_mode
)
7458 TYPE_QUAL_CONST
), 0, 1, 1);
7460 emit_move_insn (new, op0
);
7461 op0
= copy_rtx (new);
7462 PUT_MODE (op0
, BLKmode
);
7463 set_mem_attributes (op0
, exp
, 1);
7469 /* If the result is BLKmode, use that to access the object
7471 if (mode
== BLKmode
)
7474 /* Get a reference to just this component. */
7475 if (modifier
== EXPAND_CONST_ADDRESS
7476 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7477 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7479 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7481 if (op0
== orig_op0
)
7482 op0
= copy_rtx (op0
);
7484 set_mem_attributes (op0
, exp
, 0);
7485 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7486 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7488 MEM_VOLATILE_P (op0
) |= volatilep
;
7489 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7490 || modifier
== EXPAND_CONST_ADDRESS
7491 || modifier
== EXPAND_INITIALIZER
)
7493 else if (target
== 0)
7494 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7496 convert_move (target
, op0
, unsignedp
);
7502 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7504 /* Evaluate the interior expression. */
7505 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7508 /* Get or create an instruction off which to hang a note. */
7509 if (REG_P (subtarget
))
7512 insn
= get_last_insn ();
7515 if (! INSN_P (insn
))
7516 insn
= prev_nonnote_insn (insn
);
7520 target
= gen_reg_rtx (GET_MODE (subtarget
));
7521 insn
= emit_move_insn (target
, subtarget
);
7524 /* Collect the data for the note. */
7525 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7526 vtbl_ref
= plus_constant (vtbl_ref
,
7527 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7528 /* Discard the initial CONST that was added. */
7529 vtbl_ref
= XEXP (vtbl_ref
, 0);
7532 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */

      /* Pascal set IN expression.

	     rlo       = set_low - (set_low%bits_per_word);
	     the_word  = set [ (index - rlo)/bits_per_word ];
	     bit_index = index % bits_per_word;
	     bitmask   = 1 << bit_index;
	     return !!(the_word & bitmask);  */
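      /* Editorial note: a stand-alone sketch (not GCC code) of the bit test
	 sketched above, using bytes as the word size and assuming
	 non-negative bounds with INDEX already range-checked.  */
#if 0
#include <limits.h>

static int
set_member_p (const unsigned char *set, long set_low, long index)
{
  long rlo = set_low - (set_low % CHAR_BIT);	/* element of bit 0 in set[0] */
  unsigned char the_word = set[(index - rlo) / CHAR_BIT];
  int bit_index = (int) (index % CHAR_BIT);
  unsigned char bitmask = (unsigned char) (1 << bit_index);

  return (the_word & bitmask) != 0;
}
#endif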
7554 tree set
= TREE_OPERAND (exp
, 0);
7555 tree index
= TREE_OPERAND (exp
, 1);
7556 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7557 tree set_type
= TREE_TYPE (set
);
7558 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7559 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7560 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7561 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7562 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7563 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7564 rtx setaddr
= XEXP (setval
, 0);
7565 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7567 rtx diff
, quo
, rem
, addr
, bit
, result
;
7569 /* If domain is empty, answer is no. Likewise if index is constant
7570 and out of bounds. */
7571 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7572 && TREE_CODE (set_low_bound
) == INTEGER_CST
7573 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7574 || (TREE_CODE (index
) == INTEGER_CST
7575 && TREE_CODE (set_low_bound
) == INTEGER_CST
7576 && tree_int_cst_lt (index
, set_low_bound
))
7577 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7578 && TREE_CODE (index
) == INTEGER_CST
7579 && tree_int_cst_lt (set_high_bound
, index
))))
7583 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7585 /* If we get here, we have to generate the code for both cases
7586 (in range and out of range). */
7588 op0
= gen_label_rtx ();
7589 op1
= gen_label_rtx ();
7591 if (! (GET_CODE (index_val
) == CONST_INT
7592 && GET_CODE (lo_r
) == CONST_INT
))
7593 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7594 GET_MODE (index_val
), iunsignedp
, op1
);
7596 if (! (GET_CODE (index_val
) == CONST_INT
7597 && GET_CODE (hi_r
) == CONST_INT
))
7598 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7599 GET_MODE (index_val
), iunsignedp
, op1
);
7601 /* Calculate the element number of bit zero in the first word
7603 if (GET_CODE (lo_r
) == CONST_INT
)
7604 rlow
= GEN_INT (INTVAL (lo_r
)
7605 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7607 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7608 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7609 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7611 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7612 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7614 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7615 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7616 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7617 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7619 addr
= memory_address (byte_mode
,
7620 expand_binop (index_mode
, add_optab
, diff
,
7621 setaddr
, NULL_RTX
, iunsignedp
,
7624 /* Extract the bit we want to examine. */
7625 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7626 gen_rtx_MEM (byte_mode
, addr
),
7627 make_tree (TREE_TYPE (index
), rem
),
7629 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7630 GET_MODE (target
) == byte_mode
? target
: 0,
7631 1, OPTAB_LIB_WIDEN
);
7633 if (result
!= target
)
7634 convert_move (target
, result
, 1);
7636 /* Output the code to handle the out-of-range case. */
7639 emit_move_insn (target
, const0_rtx
);
7644 case WITH_CLEANUP_EXPR
:
7645 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7647 WITH_CLEANUP_EXPR_RTL (exp
)
7648 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7649 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7650 CLEANUP_EH_ONLY (exp
));
7652 /* That's it for this cleanup. */
7653 TREE_OPERAND (exp
, 1) = 0;
7655 return WITH_CLEANUP_EXPR_RTL (exp
);
7657 case CLEANUP_POINT_EXPR
:
7659 /* Start a new binding layer that will keep track of all cleanup
7660 actions to be performed. */
7661 expand_start_bindings (2);
7663 target_temp_slot_level
= temp_slot_level
;
7665 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7666 /* If we're going to use this value, load it up now. */
7668 op0
= force_not_mem (op0
);
7669 preserve_temp_slots (op0
);
7670 expand_end_bindings (NULL_TREE
, 0, 0);
7675 /* Check for a built-in function. */
7676 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7677 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7679 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7681 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7682 == BUILT_IN_FRONTEND
)
7683 return (*lang_hooks
.expand_expr
)
7684 (exp
, original_target
, tmode
, modifier
);
7686 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7689 return expand_call (exp
, target
, ignore
);
7691 case NON_LVALUE_EXPR
:
7694 case REFERENCE_EXPR
:
7695 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7698 if (TREE_CODE (type
) == UNION_TYPE
)
7700 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7702 /* If both input and output are BLKmode, this conversion isn't doing
7703 anything except possibly changing memory attribute. */
7704 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7706 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7709 result
= copy_rtx (result
);
7710 set_mem_attributes (result
, exp
, 0);
7715 target
= assign_temp (type
, 0, 1, 1);
7717 if (GET_CODE (target
) == MEM
)
7718 /* Store data into beginning of memory target. */
7719 store_expr (TREE_OPERAND (exp
, 0),
7720 adjust_address (target
, TYPE_MODE (valtype
), 0), 0);
7722 else if (GET_CODE (target
) == REG
)
7723 /* Store this field into a union of the proper type. */
7724 store_field (target
,
7725 MIN ((int_size_in_bytes (TREE_TYPE
7726 (TREE_OPERAND (exp
, 0)))
7728 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7729 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7730 VOIDmode
, 0, type
, 0);
7734 /* Return the entire union. */
7738 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7740 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7743 /* If the signedness of the conversion differs and OP0 is
7744 a promoted SUBREG, clear that indication since we now
7745 have to do the proper extension. */
7746 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7747 && GET_CODE (op0
) == SUBREG
)
7748 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7753 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7754 if (GET_MODE (op0
) == mode
)
7757 /* If OP0 is a constant, just convert it into the proper mode. */
7758 if (CONSTANT_P (op0
))
7760 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7761 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7763 if (modifier
== EXPAND_INITIALIZER
)
7764 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7765 subreg_lowpart_offset (mode
,
7768 return convert_modes (mode
, inner_mode
, op0
,
7769 TREE_UNSIGNED (inner_type
));
7772 if (modifier
== EXPAND_INITIALIZER
)
7773 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7777 convert_to_mode (mode
, op0
,
7778 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7780 convert_move (target
, op0
,
7781 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7784 case VIEW_CONVERT_EXPR
:
7785 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are within a word, we
	 can use gen_lowpart.  If neither is true, make sure the operand is
	 in memory and convert the MEM to the new mode.  */
7791 if (TYPE_MODE (type
) == GET_MODE (op0
))
7793 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7794 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7795 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7796 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7797 else if (GET_CODE (op0
) != MEM
)
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
7803 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7805 if (TREE_ADDRESSABLE (exp
))
7808 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7810 = assign_stack_temp_for_type
7811 (TYPE_MODE (inner_type
),
7812 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7814 emit_move_insn (target
, op0
);
      /* At this point, OP0 is in the correct mode.  If the output type is
	 such that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
7822 if (GET_CODE (op0
) == MEM
)
7824 op0
= copy_rtx (op0
);
7826 if (TYPE_ALIGN_OK (type
))
7827 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7828 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7829 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7831 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7832 HOST_WIDE_INT temp_size
7833 = MAX (int_size_in_bytes (inner_type
),
7834 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7835 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7836 temp_size
, 0, type
);
7837 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7839 if (TREE_ADDRESSABLE (exp
))
7842 if (GET_MODE (op0
) == BLKmode
)
7843 emit_block_move (new_with_op0_mode
, op0
,
7844 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7847 emit_move_insn (new_with_op0_mode
, op0
);
7852 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7858 this_optab
= ! unsignedp
&& flag_trapv
7859 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7860 ? addv_optab
: add_optab
;
      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */
7873 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7874 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7875 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7876 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7877 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7878 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7880 tree t
= TREE_OPERAND (exp
, 1);
7882 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7883 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
7894 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7895 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7897 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7898 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7899 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7903 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
7910 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7912 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7913 op1
= plus_constant (op1
, INTVAL (constant_part
));
7914 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7915 op1
= force_operand (op1
, target
);
7919 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7920 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7921 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7925 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7926 (modifier
== EXPAND_INITIALIZER
7927 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7928 if (! CONSTANT_P (op0
))
7930 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7931 VOIDmode
, modifier
);
7932 /* Don't go to both_summands if modifier
7933 says it's not right to return a PLUS. */
7934 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7938 /* Use immed_double_const to ensure that the constant is
7939 truncated according to the mode of OP1, then sign extended
7940 to a HOST_WIDE_INT. Using the constant directly can result
7941 in non-canonical RTL in a 64x32 cross compile. */
7943 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7945 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7946 op0
= plus_constant (op0
, INTVAL (constant_part
));
7947 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7948 op0
= force_operand (op0
, target
);
7953 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7956 /* No sense saving up arithmetic to be done
7957 if it's all in the wrong mode to form part of an address.
7958 And force_operand won't know whether to sign-extend or
7960 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7961 || mode
!= ptr_mode
)
7963 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7964 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7965 if (op0
== const0_rtx
)
7967 if (op1
== const0_rtx
)
7972 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
7973 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
7975 /* We come here from MINUS_EXPR when the second operand is a
7978 /* Make sure any term that's a sum with a constant comes last. */
7979 if (GET_CODE (op0
) == PLUS
7980 && CONSTANT_P (XEXP (op0
, 1)))
7986 /* If adding to a sum including a constant,
7987 associate it to put the constant outside. */
7988 if (GET_CODE (op1
) == PLUS
7989 && CONSTANT_P (XEXP (op1
, 1)))
7991 rtx constant_term
= const0_rtx
;
7993 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7996 /* Ensure that MULT comes first if there is one. */
7997 else if (GET_CODE (op0
) == MULT
)
7998 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
8000 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
8002 /* Let's also eliminate constants from op0 if possible. */
8003 op0
= eliminate_constant_term (op0
, &constant_term
);
8005 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8006 their sum should be a constant. Form it into OP1, since the
8007 result we want will then be OP0 + OP1. */
8009 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
8014 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
8017 /* Put a constant term last and put a multiplication first. */
8018 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
8019 temp
= op1
, op1
= op0
, op0
= temp
;
8021 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
8022 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
8025 /* For initializers, we are allowed to return a MINUS of two
8026 symbolic constants. Here we handle all cases when both operands
8028 /* Handle difference of two symbolic constants,
8029 for the sake of an initializer. */
8030 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8031 && really_constant_p (TREE_OPERAND (exp
, 0))
8032 && really_constant_p (TREE_OPERAND (exp
, 1)))
8034 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
,
8036 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
8039 /* If the last operand is a CONST_INT, use plus_constant of
8040 the negated constant. Else make the MINUS. */
8041 if (GET_CODE (op1
) == CONST_INT
)
8042 return plus_constant (op0
, - INTVAL (op1
));
8044 return gen_rtx_MINUS (mode
, op0
, op1
);
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS(mode) == MODE_INT)
                   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  goto both_summands;
	}
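
      /* Illustrative sketch (an aside, not original expr.c logic): at the
	 source level this conversion corresponds to rewriting

	     p - 8      as      p + (-8)

	 so a subtraction of a constant can share the both_summands
	 address-arithmetic path used for PLUS_EXPR.  */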
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* If we knew for certain that this is arithmetic for an array
	     reference, and we knew the bounds of the array, then we could
	     apply the distributive law across (PLUS X C) for constant C.
	     Without such knowledge, we risk overflowing the computation
	     when both X and C are large, but X+C isn't.  */
	  /* ??? Could perhaps special-case EXP being unsigned and C being
	     positive.  In that case we are certain that X+C is no smaller
	     than X and so the transformed expression will overflow iff the
	     original would have.  */

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
	}
8114 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8117 /* Check for multiplying things that have been extended
8118 from a narrower type. If this machine supports multiplying
8119 in that narrower type with a result in the desired type,
8120 do it that way, and avoid the explicit type-conversion. */
8121 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
8122 && TREE_CODE (type
) == INTEGER_TYPE
8123 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8124 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
8125 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
8126 && int_fits_type_p (TREE_OPERAND (exp
, 1),
8127 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8128 /* Don't use a widening multiply if a shift will do. */
8129 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
8130 > HOST_BITS_PER_WIDE_INT
)
8131 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
8133 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8134 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
8136 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
8137 /* If both operands are extended, they must either both
8138 be zero-extended or both be sign-extended. */
8139 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
8141 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
8143 enum machine_mode innermode
8144 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
8145 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8146 ? smul_widen_optab
: umul_widen_optab
);
8147 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8148 ? umul_widen_optab
: smul_widen_optab
);
8149 if (mode
== GET_MODE_WIDER_MODE (innermode
))
8151 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
8153 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8154 NULL_RTX
, VOIDmode
, 0);
8155 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
8156 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
8159 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
8160 NULL_RTX
, VOIDmode
, 0);
8163 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
8164 && innermode
== word_mode
)
8167 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8168 NULL_RTX
, VOIDmode
, 0);
8169 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
8170 op1
= convert_modes (innermode
, mode
,
8171 expand_expr (TREE_OPERAND (exp
, 1),
8172 NULL_RTX
, VOIDmode
, 0),
8175 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
8176 NULL_RTX
, VOIDmode
, 0);
8177 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8178 unsignedp
, OPTAB_LIB_WIDEN
);
8179 htem
= expand_mult_highpart_adjust (innermode
,
8180 gen_highpart (innermode
, temp
),
8182 gen_highpart (innermode
, temp
),
8184 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
8189 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8190 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8191 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
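
      /* Illustrative sketch (an aside, not original expr.c logic): the
	 narrower-operand check above targets sources such as

	     short a, b;
	     int p = (int) a * (int) b;

	 where a widening-multiply pattern (smul_widen_optab or
	 umul_widen_optab) can produce the int product directly from the
	 short operands, avoiding the explicit conversions.  */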
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
	 expensive divide.  If not, combine will rebuild the original
	 computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, unsignedp);
      this_optab = sdiv_optab;
      goto binop;
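
      /* Illustrative sketch (an aside, not original expr.c logic): under
	 -funsafe-math-optimizations the rewrite above corresponds to

	     double f (double a, double b) { return a / b; }
	 becoming
	     double f (double a, double b) { return a * (1.0 / b); }

	 which lets CSE reuse 1.0/b when the same divisor feeds several
	 divisions; combine rebuilds a/b if that never pays off.  */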
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || GET_MODE (target) != mode)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || GET_MODE (target) != mode)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS(mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8285 target
= original_target
;
8286 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
8287 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8288 || GET_MODE (target
) != mode
8289 || (GET_CODE (target
) == REG
8290 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8291 target
= gen_reg_rtx (mode
);
8292 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8293 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8295 /* First try to do it with a special MIN or MAX instruction.
8296 If that does not win, use a conditional jump to select the proper
8298 this_optab
= (TREE_UNSIGNED (type
)
8299 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8300 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8302 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8307 /* At this point, a MEM target is no longer useful; we will get better
8310 if (GET_CODE (target
) == MEM
)
8311 target
= gen_reg_rtx (mode
);
8314 emit_move_insn (target
, op0
);
8316 op0
= gen_label_rtx ();
8318 /* If this mode is an integer too wide to compare properly,
8319 compare word by word. Rely on cse to optimize constant cases. */
8320 if (GET_MODE_CLASS (mode
) == MODE_INT
8321 && ! can_compare_p (GE
, mode
, ccp_jump
))
8323 if (code
== MAX_EXPR
)
8324 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8325 target
, op1
, NULL_RTX
, op0
);
8327 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8328 op1
, target
, NULL_RTX
, op0
);
8332 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
8333 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8334 unsignedp
, mode
, NULL_RTX
, NULL_RTX
,
8337 emit_move_insn (target
, op1
);
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */

    case UNORDERED_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
	        emit_move_insn (target, const1_rtx);
	      else
	        emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);

    case COND_EXPR:
8494 /* If we would have a "singleton" (see below) were it not for a
8495 conversion in each arm, bring that conversion back out. */
8496 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8497 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8498 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8499 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8501 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8502 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8504 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8505 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8506 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8507 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8508 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8509 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8510 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8511 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8512 return expand_expr (build1 (NOP_EXPR
, type
,
8513 build (COND_EXPR
, TREE_TYPE (iftrue
),
8514 TREE_OPERAND (exp
, 0),
8516 target
, tmode
, modifier
);
8520 /* Note that COND_EXPRs whose type is a structure or union
8521 are required to be constructed to contain assignments of
8522 a temporary variable, so that we can evaluate them here
8523 for side effect only. If type is void, we must do likewise. */
8525 /* If an arm of the branch requires a cleanup,
8526 only that cleanup is performed. */
8529 tree binary_op
= 0, unary_op
= 0;
8531 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8532 convert it to our mode, if necessary. */
8533 if (integer_onep (TREE_OPERAND (exp
, 1))
8534 && integer_zerop (TREE_OPERAND (exp
, 2))
8535 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8539 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8544 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8545 if (GET_MODE (op0
) == mode
)
8549 target
= gen_reg_rtx (mode
);
8550 convert_move (target
, op0
, unsignedp
);
8554 /* Check for X ? A + B : A. If we have this, we can copy A to the
8555 output and conditionally add B. Similarly for unary operations.
8556 Don't do this if X has side-effects because those side effects
8557 might affect A or B and the "?" operation is a sequence point in
8558 ANSI. (operand_equal_p tests for side effects.) */
8560 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8561 && operand_equal_p (TREE_OPERAND (exp
, 2),
8562 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8563 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8564 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8565 && operand_equal_p (TREE_OPERAND (exp
, 1),
8566 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8567 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8568 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8569 && operand_equal_p (TREE_OPERAND (exp
, 2),
8570 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8571 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8572 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8573 && operand_equal_p (TREE_OPERAND (exp
, 1),
8574 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8575 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8577 /* If we are not to produce a result, we have no target. Otherwise,
8578 if a target was specified use it; it will not be used as an
8579 intermediate target unless it is safe. If no target, use a
8584 else if (original_target
8585 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8586 || (singleton
&& GET_CODE (original_target
) == REG
8587 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8588 && original_target
== var_rtx (singleton
)))
8589 && GET_MODE (original_target
) == mode
8590 #ifdef HAVE_conditional_move
8591 && (! can_conditionally_move_p (mode
)
8592 || GET_CODE (original_target
) == REG
8593 || TREE_ADDRESSABLE (type
))
8595 && (GET_CODE (original_target
) != MEM
8596 || TREE_ADDRESSABLE (type
)))
8597 temp
= original_target
;
8598 else if (TREE_ADDRESSABLE (type
))
8601 temp
= assign_temp (type
, 0, 0, 1);
8603 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8604 do the test of X as a store-flag operation, do this as
8605 A + ((X != 0) << log C). Similarly for other simple binary
8606 operators. Only do for C == 1 if BRANCH_COST is low. */
8607 if (temp
&& singleton
&& binary_op
8608 && (TREE_CODE (binary_op
) == PLUS_EXPR
8609 || TREE_CODE (binary_op
) == MINUS_EXPR
8610 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8611 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8612 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8613 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8614 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8618 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8619 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8620 ? addv_optab
: add_optab
)
8621 : TREE_CODE (binary_op
) == MINUS_EXPR
8622 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8623 ? subv_optab
: sub_optab
)
8624 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8627 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8628 if (singleton
== TREE_OPERAND (exp
, 1))
8629 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8631 cond
= TREE_OPERAND (exp
, 0);
8633 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8635 mode
, BRANCH_COST
<= 1);
8637 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8638 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8639 build_int_2 (tree_log2
8643 (safe_from_p (temp
, singleton
, 1)
8644 ? temp
: NULL_RTX
), 0);
8648 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8649 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8650 unsignedp
, OPTAB_LIB_WIDEN
);
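
	  /* Illustrative sketch (an aside, not original expr.c logic): with
	     C a constant power of two, the transformation above turns

		 y = x ? a + 4 : a;
	     into
		 y = a + ((x != 0) << 2);

	     trading a conditional branch for a store-flag and a shift,
	     which is profitable when BRANCH_COST is high.  */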
8654 do_pending_stack_adjust ();
8656 op0
= gen_label_rtx ();
8658 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8662 /* If the target conflicts with the other operand of the
8663 binary op, we can't use it. Also, we can't use the target
8664 if it is a hard register, because evaluating the condition
8665 might clobber it. */
8667 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8668 || (GET_CODE (temp
) == REG
8669 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8670 temp
= gen_reg_rtx (mode
);
8671 store_expr (singleton
, temp
, 0);
8674 expand_expr (singleton
,
8675 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8676 if (singleton
== TREE_OPERAND (exp
, 1))
8677 jumpif (TREE_OPERAND (exp
, 0), op0
);
8679 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8681 start_cleanup_deferral ();
8682 if (binary_op
&& temp
== 0)
8683 /* Just touch the other operand. */
8684 expand_expr (TREE_OPERAND (binary_op
, 1),
8685 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8687 store_expr (build (TREE_CODE (binary_op
), type
,
8688 make_tree (type
, temp
),
8689 TREE_OPERAND (binary_op
, 1)),
8692 store_expr (build1 (TREE_CODE (unary_op
), type
,
8693 make_tree (type
, temp
)),
8697 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8698 comparison operator. If we have one of these cases, set the
8699 output to A, branch on A (cse will merge these two references),
8700 then set the output to FOO. */
8702 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8703 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8704 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8705 TREE_OPERAND (exp
, 1), 0)
8706 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8707 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8708 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8710 if (GET_CODE (temp
) == REG
8711 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8712 temp
= gen_reg_rtx (mode
);
8713 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8714 jumpif (TREE_OPERAND (exp
, 0), op0
);
8716 start_cleanup_deferral ();
8717 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8721 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8722 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8723 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8724 TREE_OPERAND (exp
, 2), 0)
8725 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8726 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8727 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8729 if (GET_CODE (temp
) == REG
8730 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8731 temp
= gen_reg_rtx (mode
);
8732 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8733 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8735 start_cleanup_deferral ();
8736 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8741 op1
= gen_label_rtx ();
8742 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8744 start_cleanup_deferral ();
8746 /* One branch of the cond can be void, if it never returns. For
8747 example A ? throw : E */
8749 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8750 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8752 expand_expr (TREE_OPERAND (exp
, 1),
8753 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8754 end_cleanup_deferral ();
8756 emit_jump_insn (gen_jump (op1
));
8759 start_cleanup_deferral ();
8761 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8762 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8764 expand_expr (TREE_OPERAND (exp
, 2),
8765 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8768 end_cleanup_deferral ();
8779 /* Something needs to be initialized, but we didn't know
8780 where that thing was when building the tree. For example,
8781 it could be the return value of a function, or a parameter
8782 to a function which lays down in the stack, or a temporary
8783 variable which must be passed by reference.
8785 We guarantee that the expression will either be constructed
8786 or copied into our original target. */
8788 tree slot
= TREE_OPERAND (exp
, 0);
8789 tree cleanups
= NULL_TREE
;
8792 if (TREE_CODE (slot
) != VAR_DECL
)
8796 target
= original_target
;
8798 /* Set this here so that if we get a target that refers to a
8799 register variable that's already been used, put_reg_into_stack
8800 knows that it should fix up those uses. */
8801 TREE_USED (slot
) = 1;
8805 if (DECL_RTL_SET_P (slot
))
8807 target
= DECL_RTL (slot
);
8808 /* If we have already expanded the slot, so don't do
8810 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8815 target
= assign_temp (type
, 2, 0, 1);
8816 /* All temp slots at this level must not conflict. */
8817 preserve_temp_slots (target
);
8818 SET_DECL_RTL (slot
, target
);
8819 if (TREE_ADDRESSABLE (slot
))
8820 put_var_into_stack (slot
);
8822 /* Since SLOT is not known to the called function
8823 to belong to its stack frame, we must build an explicit
8824 cleanup. This case occurs when we must build up a reference
8825 to pass the reference as an argument. In this case,
8826 it is very likely that such a reference need not be
8829 if (TREE_OPERAND (exp
, 2) == 0)
8830 TREE_OPERAND (exp
, 2)
8831 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
8832 cleanups
= TREE_OPERAND (exp
, 2);
8837 /* This case does occur, when expanding a parameter which
8838 needs to be constructed on the stack. The target
8839 is the actual stack address that we want to initialize.
8840 The function we call will perform the cleanup in this case. */
8842 /* If we have already assigned it space, use that space,
8843 not target that we were passed in, as our target
8844 parameter is only a hint. */
8845 if (DECL_RTL_SET_P (slot
))
8847 target
= DECL_RTL (slot
);
8848 /* If we have already expanded the slot, so don't do
8850 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8855 SET_DECL_RTL (slot
, target
);
8856 /* If we must have an addressable slot, then make sure that
8857 the RTL that we just stored in slot is OK. */
8858 if (TREE_ADDRESSABLE (slot
))
8859 put_var_into_stack (slot
);
8863 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8864 /* Mark it as expanded. */
8865 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8867 store_expr (exp1
, target
, 0);
8869 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8876 tree lhs
= TREE_OPERAND (exp
, 0);
8877 tree rhs
= TREE_OPERAND (exp
, 1);
8879 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8885 /* If lhs is complex, expand calls in rhs before computing it.
8886 That's so we don't compute a pointer and save it over a
8887 call. If lhs is simple, compute it first so we can give it
8888 as a target if the rhs is just a call. This avoids an
8889 extra temp and copy and that prevents a partial-subsumption
8890 which makes bad code. Actually we could treat
8891 component_ref's of vars like vars. */
8893 tree lhs
= TREE_OPERAND (exp
, 0);
8894 tree rhs
= TREE_OPERAND (exp
, 1);
8898 /* Check for |= or &= of a bitfield of size one into another bitfield
8899 of size 1. In this case, (unless we need the result of the
8900 assignment) we can do this more efficiently with a
8901 test followed by an assignment, if necessary.
8903 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8904 things change so we do, this code should be enhanced to
8907 && TREE_CODE (lhs
) == COMPONENT_REF
8908 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8909 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8910 && TREE_OPERAND (rhs
, 0) == lhs
8911 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8912 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8913 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8915 rtx label
= gen_label_rtx ();
8917 do_jump (TREE_OPERAND (rhs
, 1),
8918 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8919 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8920 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8921 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8923 : integer_zero_node
)),
8925 do_pending_stack_adjust ();
8930 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8936 if (!TREE_OPERAND (exp
, 0))
8937 expand_null_return ();
8939 expand_return (TREE_OPERAND (exp
, 0));
8942 case PREINCREMENT_EXPR
:
8943 case PREDECREMENT_EXPR
:
8944 return expand_increment (exp
, 0, ignore
);
8946 case POSTINCREMENT_EXPR
:
8947 case POSTDECREMENT_EXPR
:
8948 /* Faster to treat as pre-increment if result is not used. */
8949 return expand_increment (exp
, ! ignore
, ignore
);
8952 /* Are we taking the address of a nested function? */
8953 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8954 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8955 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8956 && ! TREE_STATIC (exp
))
8958 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8959 op0
= force_operand (op0
, target
);
8961 /* If we are taking the address of something erroneous, just
8963 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8965 /* If we are taking the address of a constant and are at the
8966 top level, we have to use output_constant_def since we can't
8967 call force_const_mem at top level. */
8969 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8970 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8972 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8975 /* We make sure to pass const0_rtx down if we came in with
8976 ignore set, to avoid doing the cleanups twice for something. */
8977 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8978 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8979 (modifier
== EXPAND_INITIALIZER
8980 ? modifier
: EXPAND_CONST_ADDRESS
));
8982 /* If we are going to ignore the result, OP0 will have been set
8983 to const0_rtx, so just return it. Don't get confused and
8984 think we are taking the address of the constant. */
8988 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8989 clever and returns a REG when given a MEM. */
8990 op0
= protect_from_queue (op0
, 1);
8992 /* We would like the object in memory. If it is a constant, we can
8993 have it be statically allocated into memory. For a non-constant,
8994 we need to allocate some memory and store the value into it. */
8996 if (CONSTANT_P (op0
))
8997 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8999 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
9000 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
9001 || GET_CODE (op0
) == PARALLEL
)
9003 /* If the operand is a SAVE_EXPR, we can deal with this by
9004 forcing the SAVE_EXPR into memory. */
9005 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
9007 put_var_into_stack (TREE_OPERAND (exp
, 0));
9008 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
9012 /* If this object is in a register, it can't be BLKmode. */
9013 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9014 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
9016 if (GET_CODE (op0
) == PARALLEL
)
9017 /* Handle calls that pass values in multiple
9018 non-contiguous locations. The Irix 6 ABI has examples
9020 emit_group_store (memloc
, op0
,
9021 int_size_in_bytes (inner_type
));
9023 emit_move_insn (memloc
, op0
);
9029 if (GET_CODE (op0
) != MEM
)
9032 mark_temp_addr_taken (op0
);
9033 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
9035 op0
= XEXP (op0
, 0);
9036 #ifdef POINTERS_EXTEND_UNSIGNED
9037 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
9038 && mode
== ptr_mode
)
9039 op0
= convert_memory_address (ptr_mode
, op0
);
9044 /* If OP0 is not aligned as least as much as the type requires, we
9045 need to make a temporary, copy OP0 to it, and take the address of
9046 the temporary. We want to use the alignment of the type, not of
9047 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9048 the test for BLKmode means that can't happen. The test for
9049 BLKmode is because we never make mis-aligned MEMs with
9052 We don't need to do this at all if the machine doesn't have
9053 strict alignment. */
9054 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
9055 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
9057 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
9059 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9061 = assign_stack_temp_for_type
9062 (TYPE_MODE (inner_type
),
9063 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
9064 : int_size_in_bytes (inner_type
),
9065 1, build_qualified_type (inner_type
,
9066 (TYPE_QUALS (inner_type
)
9067 | TYPE_QUAL_CONST
)));
9069 if (TYPE_ALIGN_OK (inner_type
))
9072 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
9077 op0
= force_operand (XEXP (op0
, 0), target
);
9081 && GET_CODE (op0
) != REG
9082 && modifier
!= EXPAND_CONST_ADDRESS
9083 && modifier
!= EXPAND_INITIALIZER
9084 && modifier
!= EXPAND_SUM
)
9085 op0
= force_reg (Pmode
, op0
);
9087 if (GET_CODE (op0
) == REG
9088 && ! REG_USERVAR_P (op0
))
9089 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
9091 #ifdef POINTERS_EXTEND_UNSIGNED
9092 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
9093 && mode
== ptr_mode
)
9094 op0
= convert_memory_address (ptr_mode
, op0
);
9099 case ENTRY_VALUE_EXPR
:
9102 /* COMPLEX type for Extended Pascal & Fortran */
9105 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9108 /* Get the rtx code of the operands. */
9109 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9110 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
9113 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
9117 /* Move the real (op0) and imaginary (op1) parts to their location. */
9118 emit_move_insn (gen_realpart (mode
, target
), op0
);
9119 emit_move_insn (gen_imagpart (mode
, target
), op1
);
9121 insns
= get_insns ();
9124 /* Complex construction should appear as a single unit. */
9125 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9126 each with a separate pseudo as destination.
9127 It's not correct for flow to treat them as a unit. */
9128 if (GET_CODE (target
) != CONCAT
)
9129 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
9137 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9138 return gen_realpart (mode
, op0
);
9141 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9142 return gen_imagpart (mode
, op0
);
9146 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9150 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9153 target
= gen_reg_rtx (mode
);
9157 /* Store the realpart and the negated imagpart to target. */
9158 emit_move_insn (gen_realpart (partmode
, target
),
9159 gen_realpart (partmode
, op0
));
9161 imag_t
= gen_imagpart (partmode
, target
);
9162 temp
= expand_unop (partmode
,
9163 ! unsignedp
&& flag_trapv
9164 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
9165 ? negv_optab
: neg_optab
,
9166 gen_imagpart (partmode
, op0
), imag_t
, 0);
9168 emit_move_insn (imag_t
, temp
);
9170 insns
= get_insns ();
9173 /* Conjugate should appear as a single unit
9174 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9175 each with a separate pseudo as destination.
9176 It's not correct for flow to treat them as a unit. */
9177 if (GET_CODE (target
) != CONCAT
)
9178 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
9185 case TRY_CATCH_EXPR
:
9187 tree handler
= TREE_OPERAND (exp
, 1);
9189 expand_eh_region_start ();
9191 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9193 expand_eh_region_end_cleanup (handler
);
9198 case TRY_FINALLY_EXPR
:
9200 tree try_block
= TREE_OPERAND (exp
, 0);
9201 tree finally_block
= TREE_OPERAND (exp
, 1);
9203 if (!optimize
|| unsafe_for_reeval (finally_block
) > 1)
9205 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9206 is not sufficient, so we cannot expand the block twice.
9207 So we play games with GOTO_SUBROUTINE_EXPR to let us
9208 expand the thing only once. */
9209 /* When not optimizing, we go ahead with this form since
9210 (1) user breakpoints operate more predictably without
9211 code duplication, and
9212 (2) we're not running any of the global optimizers
9213 that would explode in time/space with the highly
9214 connected CFG created by the indirect branching. */
9216 rtx finally_label
= gen_label_rtx ();
9217 rtx done_label
= gen_label_rtx ();
9218 rtx return_link
= gen_reg_rtx (Pmode
);
9219 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
9220 (tree
) finally_label
, (tree
) return_link
);
9221 TREE_SIDE_EFFECTS (cleanup
) = 1;
9223 /* Start a new binding layer that will keep track of all cleanup
9224 actions to be performed. */
9225 expand_start_bindings (2);
9226 target_temp_slot_level
= temp_slot_level
;
9228 expand_decl_cleanup (NULL_TREE
, cleanup
);
9229 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9231 preserve_temp_slots (op0
);
9232 expand_end_bindings (NULL_TREE
, 0, 0);
9233 emit_jump (done_label
);
9234 emit_label (finally_label
);
9235 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
9236 emit_indirect_jump (return_link
);
9237 emit_label (done_label
);
9241 expand_start_bindings (2);
9242 target_temp_slot_level
= temp_slot_level
;
9244 expand_decl_cleanup (NULL_TREE
, finally_block
);
9245 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9247 preserve_temp_slots (op0
);
9248 expand_end_bindings (NULL_TREE
, 0, 0);
9254 case GOTO_SUBROUTINE_EXPR
:
9256 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9257 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9258 rtx return_address
= gen_label_rtx ();
9259 emit_move_insn (return_link
,
9260 gen_rtx_LABEL_REF (Pmode
, return_address
));
9262 emit_label (return_address
);
9267 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9270 return get_exception_pointer (cfun
);
9273 /* Function descriptors are not valid except for as
9274 initialization constants, and should not be expanded. */
9278 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
9281 /* Here to do an ordinary binary operator, generating an instruction
9282 from the optab already placed in `this_optab'. */
9284 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
9286 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
9287 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9289 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9290 unsignedp
, OPTAB_LIB_WIDEN
);
9296 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9297 when applied to the address of EXP produces an address known to be
9298 aligned more than BIGGEST_ALIGNMENT. */
9301 is_aligning_offset (offset
, exp
)
9305 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9306 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9307 || TREE_CODE (offset
) == NOP_EXPR
9308 || TREE_CODE (offset
) == CONVERT_EXPR
9309 || TREE_CODE (offset
) == WITH_RECORD_EXPR
)
9310 offset
= TREE_OPERAND (offset
, 0);
9312 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9313 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9314 if (TREE_CODE (offset
) != BIT_AND_EXPR
9315 || !host_integerp (TREE_OPERAND (offset
, 1), 1)
9316 || compare_tree_int (TREE_OPERAND (offset
, 1), BIGGEST_ALIGNMENT
) <= 0
9317 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset
, 1), 1) + 1) < 0)
9320 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9321 It must be NEGATE_EXPR. Then strip any more conversions. */
9322 offset
= TREE_OPERAND (offset
, 0);
9323 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9324 || TREE_CODE (offset
) == NOP_EXPR
9325 || TREE_CODE (offset
) == CONVERT_EXPR
)
9326 offset
= TREE_OPERAND (offset
, 0);
9328 if (TREE_CODE (offset
) != NEGATE_EXPR
)
9331 offset
= TREE_OPERAND (offset
, 0);
9332 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9333 || TREE_CODE (offset
) == NOP_EXPR
9334 || TREE_CODE (offset
) == CONVERT_EXPR
)
9335 offset
= TREE_OPERAND (offset
, 0);
9337 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9338 whose type is the same as EXP. */
9339 return (TREE_CODE (offset
) == ADDR_EXPR
9340 && (TREE_OPERAND (offset
, 0) == exp
9341 || (TREE_CODE (TREE_OPERAND (offset
, 0)) == PLACEHOLDER_EXPR
9342 && (TREE_TYPE (TREE_OPERAND (offset
, 0))
9343 == TREE_TYPE (exp
)))));
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
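
/* Illustrative usage sketch (an aside, not original expr.c code): callers
   such as the builtin expanders use string_constant to recognize arguments
   of the form "literal" or "literal" + k, roughly

       tree offset;
       tree str = string_constant (arg, &offset);
       if (str != 0 && host_integerp (offset, 1))
	 ... TREE_STRING_POINTER (str) + tree_low_cst (offset, 1) ...

   The snippet only suggests the pattern; the real callers live in
   builtins.c.  */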
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     tree exp;
     int post, ignore;
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;
9411 /* Stabilize any component ref that might need to be
9412 evaluated more than once below. */
9414 || TREE_CODE (incremented
) == BIT_FIELD_REF
9415 || (TREE_CODE (incremented
) == COMPONENT_REF
9416 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9417 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9418 incremented
= stabilize_reference (incremented
);
9419 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9420 ones into save exprs so that they don't accidentally get evaluated
9421 more than once by the code below. */
9422 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9423 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9424 incremented
= save_expr (incremented
);
9426 /* Compute the operands as RTX.
9427 Note whether OP0 is the actual lvalue or a copy of it:
9428 I believe it is a copy iff it is a register or subreg
9429 and insns were generated in computing it. */
9431 temp
= get_last_insn ();
9432 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9434 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9435 in place but instead must do sign- or zero-extension during assignment,
9436 so we copy it into a new register and let the code below use it as
9439 Note that we can safely modify this SUBREG since it is know not to be
9440 shared (it was made by the expand_expr call above). */
9442 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9445 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9449 else if (GET_CODE (op0
) == SUBREG
9450 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9452 /* We cannot increment this SUBREG in place. If we are
9453 post-incrementing, get a copy of the old value. Otherwise,
9454 just mark that we cannot increment in place. */
9456 op0
= copy_to_reg (op0
);
9461 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9462 && temp
!= get_last_insn ());
9463 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9465 /* Decide whether incrementing or decrementing. */
9466 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9467 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9468 this_optab
= sub_optab
;
9470 /* Convert decrement by a constant into a negative increment. */
9471 if (this_optab
== sub_optab
9472 && GET_CODE (op1
) == CONST_INT
)
9474 op1
= GEN_INT (-INTVAL (op1
));
9475 this_optab
= add_optab
;
9478 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9479 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9481 /* For a preincrement, see if we can do this with a single instruction. */
9484 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9485 if (icode
!= (int) CODE_FOR_nothing
9486 /* Make sure that OP0 is valid for operands 0 and 1
9487 of the insn we want to queue. */
9488 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9489 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9490 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9494 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9495 then we cannot just increment OP0. We must therefore contrive to
9496 increment the original value. Then, for postincrement, we can return
9497 OP0 since it is a copy of the old value. For preincrement, expand here
9498 unless we can do it with a single insn.
9500 Likewise if storing directly into OP0 would clobber high bits
9501 we need to preserve (bad_subreg). */
9502 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9504 /* This is the easiest way to increment the value wherever it is.
9505 Problems with multiple evaluation of INCREMENTED are prevented
9506 because either (1) it is a component_ref or preincrement,
9507 in which case it was stabilized above, or (2) it is an array_ref
9508 with constant index in an array in a register, which is
9509 safe to reevaluate. */
9510 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9511 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9512 ? MINUS_EXPR
: PLUS_EXPR
),
9515 TREE_OPERAND (exp
, 1));
9517 while (TREE_CODE (incremented
) == NOP_EXPR
9518 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9520 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9521 incremented
= TREE_OPERAND (incremented
, 0);
9524 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9525 return post
? op0
: temp
;
9530 /* We have a true reference to the value in OP0.
9531 If there is an insn to add or subtract in this mode, queue it.
9532 Queueing the increment insn avoids the register shuffling
9533 that often results if we must increment now and first save
9534 the old value for subsequent use. */
9536 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9537 op0
= stabilize (op0
);
9540 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9541 if (icode
!= (int) CODE_FOR_nothing
9542 /* Make sure that OP0 is valid for operands 0 and 1
9543 of the insn we want to queue. */
9544 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9545 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9547 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9548 op1
= force_reg (mode
, op1
);
9550 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9552 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9554 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9555 ? force_reg (Pmode
, XEXP (op0
, 0))
9556 : copy_to_reg (XEXP (op0
, 0)));
9559 op0
= replace_equiv_address (op0
, addr
);
9560 temp
= force_reg (GET_MODE (op0
), op0
);
9561 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9562 op1
= force_reg (mode
, op1
);
9564 /* The increment queue is LIFO, thus we have to `queue'
9565 the instructions in reverse order. */
9566 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9567 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9572 /* Preincrement, or we can't increment with one simple insn. */
9574 /* Save a copy of the value before inc or dec, to return it later. */
9575 temp
= value
= copy_to_reg (op0
);
9577 /* Arrange to return the incremented value. */
9578 /* Copy the rtx because expand_binop will protect from the queue,
9579 and the results of that would be invalid for us to return
9580 if our caller does emit_queue before using our result. */
9581 temp
= copy_rtx (value
= op0
);
9583 /* Increment however we can. */
9584 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9585 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9587 /* Make sure the value is stored into OP0. */
9589 emit_move_insn (op0
, op1
);
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
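
/* Illustrative model (an aside, not original expr.c code): the three
   functions above implement a simple accumulate-then-flush pattern,
   roughly equivalent to

       static int pending;
       void note_pop (int n)  { pending += n; }
       void flush_pops (void) { if (pending != 0) { pop (pending); pending = 0; } }

   where "pop" stands in for adjust_stack.  Deferring the pops lets the
   adjustments from several consecutive calls be combined into a single
   stack-pointer update.  */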
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif
9701 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9707 /* This is not true with #pragma weak */
9709 /* The address of something can never be zero. */
9711 emit_jump (if_true_label
);
9716 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9717 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9718 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
9719 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
9722 /* If we are narrowing the operand, we have to do the compare in the
9724 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9725 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9727 case NON_LVALUE_EXPR
:
9728 case REFERENCE_EXPR
:
9733 /* These cannot change zero->nonzero or vice versa. */
9734 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9737 case WITH_RECORD_EXPR
:
9738 /* Put the object on the placeholder list, recurse through our first
9739 operand, and pop the list. */
9740 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9742 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9743 placeholder_list
= TREE_CHAIN (placeholder_list
);
9747 /* This is never less insns than evaluating the PLUS_EXPR followed by
9748 a test and can be longer if the test is eliminated. */
9750 /* Reduce to minus. */
9751 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9752 TREE_OPERAND (exp
, 0),
9753 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9754 TREE_OPERAND (exp
, 1))));
9755 /* Process as MINUS. */
9759 /* Nonzero iff operands of minus differ. */
9760 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9761 TREE_OPERAND (exp
, 0),
9762 TREE_OPERAND (exp
, 1)),
9763 NE
, NE
, if_false_label
, if_true_label
);
9767 /* If we are AND'ing with a small constant, do this comparison in the
9768 smallest type that fits. If the machine doesn't have comparisons
9769 that small, it will be converted back to the wider comparison.
9770 This helps if we are testing the sign bit of a narrower object.
9771 combine can't do this for us because it can't know whether a
9772 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9774 if (! SLOW_BYTE_ACCESS
9775 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9776 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9777 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9778 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9779 && (type
= (*lang_hooks
.types
.type_for_mode
) (mode
, 1)) != 0
9780 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9781 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9782 != CODE_FOR_nothing
))
9784 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9789 case TRUTH_NOT_EXPR
:
9790 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9793 case TRUTH_ANDIF_EXPR
:
9794 if (if_false_label
== 0)
9795 if_false_label
= drop_through_label
= gen_label_rtx ();
9796 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9797 start_cleanup_deferral ();
9798 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9799 end_cleanup_deferral ();
9802 case TRUTH_ORIF_EXPR
:
9803 if (if_true_label
== 0)
9804 if_true_label
= drop_through_label
= gen_label_rtx ();
9805 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9806 start_cleanup_deferral ();
9807 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9808 end_cleanup_deferral ();
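
      /* Illustrative sketch (an aside, not original expr.c logic): for a
	 source condition such as

	     if (a && b) ... else ...

	 the TRUTH_ANDIF_EXPR case above emits, in effect,

	     jump to the false label if a is zero;
	     then test b the same way;

	 so the second operand is never evaluated once the first operand
	 already decides the result.  */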
9813 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9814 preserve_temp_slots (NULL_RTX
);
9818 do_pending_stack_adjust ();
9819 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9825 case ARRAY_RANGE_REF
:
9827 HOST_WIDE_INT bitsize
, bitpos
;
9829 enum machine_mode mode
;
9834 /* Get description of this reference. We don't actually care
9835 about the underlying object here. */
9836 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9837 &unsignedp
, &volatilep
);
9839 type
= (*lang_hooks
.types
.type_for_size
) (bitsize
, unsignedp
);
9840 if (! SLOW_BYTE_ACCESS
9841 && type
!= 0 && bitsize
>= 0
9842 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9843 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9844 != CODE_FOR_nothing
))
9846 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9853 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9854 if (integer_onep (TREE_OPERAND (exp
, 1))
9855 && integer_zerop (TREE_OPERAND (exp
, 2)))
9856 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9858 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9859 && integer_onep (TREE_OPERAND (exp
, 2)))
9860 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9864 rtx label1
= gen_label_rtx ();
9865 drop_through_label
= gen_label_rtx ();
9867 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9869 start_cleanup_deferral ();
9870 /* Now the THEN-expression. */
9871 do_jump (TREE_OPERAND (exp
, 1),
9872 if_false_label
? if_false_label
: drop_through_label
,
9873 if_true_label
? if_true_label
: drop_through_label
);
9874 /* In case the do_jump just above never jumps. */
9875 do_pending_stack_adjust ();
9876 emit_label (label1
);
9878 /* Now the ELSE-expression. */
9879 do_jump (TREE_OPERAND (exp
, 2),
9880 if_false_label
? if_false_label
: drop_through_label
,
9881 if_true_label
? if_true_label
: drop_through_label
);
9882 end_cleanup_deferral ();
9888 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9890 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9891 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9893 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9894 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9897 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9898 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9899 fold (build1 (REALPART_EXPR
,
9900 TREE_TYPE (inner_type
),
9902 fold (build1 (REALPART_EXPR
,
9903 TREE_TYPE (inner_type
),
9905 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9906 fold (build1 (IMAGPART_EXPR
,
9907 TREE_TYPE (inner_type
),
9909 fold (build1 (IMAGPART_EXPR
,
9910 TREE_TYPE (inner_type
),
9912 if_false_label
, if_true_label
);
9915 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9916 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9918 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9919 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9920 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9922 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (fold
	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		       fold (build (NE_EXPR, TREE_TYPE (exp),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))),
		       fold (build (NE_EXPR, TREE_TYPE (exp),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))))),
	       if_false_label, if_true_label);
	  }
	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
	break;
      }
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	enum rtx_code cmp, rcmp;
	int do_rev;

	if (code == UNORDERED_EXPR)
	  cmp = UNORDERED, rcmp = ORDERED;
	else
	  cmp = ORDERED, rcmp = UNORDERED;
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

	do_rev = 0;
	if (! can_compare_p (cmp, mode, ccp_jump)
	    && (can_compare_p (rcmp, mode, ccp_jump)
		/* If the target doesn't provide either UNORDERED or ORDERED
		   comparisons, canonicalize on UNORDERED for the library.  */
		|| rcmp == UNORDERED))
	  do_rev = 1;

	if (! do_rev)
	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;
      {
	enum rtx_code rcode1;
	enum tree_code tcode2;

      case UNLT_EXPR:
	rcode1 = UNLT;
	tcode2 = LT_EXPR;
	goto unordered_bcc;
      case UNLE_EXPR:
	rcode1 = UNLE;
	tcode2 = LE_EXPR;
	goto unordered_bcc;
      case UNGT_EXPR:
	rcode1 = UNGT;
	tcode2 = GT_EXPR;
	goto unordered_bcc;
      case UNGE_EXPR:
	rcode1 = UNGE;
	tcode2 = GE_EXPR;
	goto unordered_bcc;
      case UNEQ_EXPR:
	rcode1 = UNEQ;
	tcode2 = EQ_EXPR;
	goto unordered_bcc;

      unordered_bcc:
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	if (can_compare_p (rcode1, mode, ccp_jump))
	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
			       if_true_label);
	else
	  {
	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
	    tree cmp0, cmp1;

	    /* If the target doesn't support combined unordered
	       compares, decompose into UNORDERED + comparison.  */
	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	    do_jump (exp, if_false_label, if_true_label);
	  }
      }
      break;
    case CALL_EXPR:
      /* __builtin_expect (<test>, 0)  and
	 __builtin_expect (<test>, 1)

	 We need to do this here, so that <test> is not converted to a SCC
	 operation on machines that use condition code registers and COMPARE
	 like the PowerPC, and then the jump is done based on whether the SCC
	 operation produced a 1 or 0.  */

      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  tree arglist = TREE_OPERAND (exp, 1);

	  if (TREE_CODE (fndecl) == FUNCTION_DECL
	      && DECL_BUILT_IN (fndecl)
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && arglist != NULL_TREE
	      && TREE_CHAIN (arglist) != NULL_TREE)
	    {
	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						    if_true_label);

	      if (seq != NULL_RTX)
		{
		  emit_insn (seq);
		  return;
		}
	    }
	}
      /* fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
	  || GET_CODE (temp) == LABEL_REF)
	{
	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				 GET_MODE (temp), NULL_RTX,
				 if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
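
/* Illustrative sketch (not part of GCC): the transformation described in the
   comment above -- an EQ/NE compare is sign-insensitive, so a signed equality
   test can be performed unsigned once the constant is masked to the operand's
   width.  Widths here are assumed for the example.  Kept inside #if 0 so it
   does not affect compilation.  */
#if 0
#include <stdio.h>

int main (void)
{
  signed char x = -1;

  /* Comparing as the (8-bit) signed type...  */
  int signed_eq = (x == -1);

  /* ...gives the same answer as comparing the zero-extended value against
     the constant masked to the mode's width (0xff here plays the role of
     GET_MODE_MASK (GET_MODE (op0))).  */
  unsigned int ux = (unsigned char) x;
  unsigned int masked_const = ((unsigned int) -1) & 0xff;
  int unsigned_eq = (ux == masked_const);

  printf ("%d %d\n", signed_eq, unsigned_eq);	/* prints 1 1 */
  return 0;
}
#endif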
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
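
/* Illustrative sketch (not part of GCC): the shift-and-mask rewrite used
   above for equality tests of a single bit.  The bit number and types are
   assumed for the example.  Kept inside #if 0 so it does not affect
   compilation.  */
#if 0
#include <stdio.h>

int main (void)
{
  unsigned int x = 0x24;
  int bitnum = 5;

  /* (x & (1 << bitnum)) != 0  ...  */
  int ne_form = (x & (1u << bitnum)) != 0;
  /* ... becomes a shift plus an AND with 1 (no store-flag insn needed).  */
  int ne_fast = (x >> bitnum) & 1;

  /* For the EQ form, the result is simply XORed with 1 afterwards.  */
  int eq_form = (x & (1u << bitnum)) == 0;
  int eq_fast = ((x >> bitnum) & 1) ^ 1;

  printf ("%d %d %d %d\n", ne_form, ne_fast, eq_form, eq_fast); /* 1 1 0 0 */
  return 0;
}
#endif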
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
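
/* Illustrative sketch (not part of GCC): the set/compare/jump/set fallback
   used above when emit_store_flag fails -- preload the result with one
   value, then conditionally jump over a move of the other.  The values and
   comparison are assumed for the example.  Kept inside #if 0 so it does not
   affect compilation.  */
#if 0
#include <stdio.h>

/* Compute (a < b) without relying on a "set on condition" operation; this
   mirrors the emit_move_insn / conditional jump / emit_move_insn /
   emit_label sequence in the code above.  */
static int
less_than_flag (int a, int b)
{
  int target = 1;		/* emit_move_insn (target, const1_rtx)   */
  if (a < b)			/* conditional branch over the other move */
    goto done;
  target = 0;			/* emit_move_insn (target, const0_rtx)   */
 done:				/* emit_label (label)                     */
  return target;
}

int main (void)
{
  printf ("%d %d\n", less_than_flag (2, 5), less_than_flag (5, 2)); /* 1 0 */
  return 0;
}
#endif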
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));

  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
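
/* Illustrative sketch (not part of GCC): the single unsigned comparison that
   do_tablejump relies on for its bounds check.  After subtracting the low
   bound, one GTU test rejects both indices below the minimum (they wrap to
   huge unsigned values) and indices above the maximum.  The bounds and test
   values are assumed for the example.  Kept inside #if 0 so it does not
   affect compilation.  */
#if 0
#include <stdio.h>

int main (void)
{
  int low = 10, high = 14;
  int values[] = { 10, 14, 9, 15, -3 };
  size_t i;

  for (i = 0; i < sizeof values / sizeof values[0]; i++)
    {
      int x = values[i];
      /* One comparison replaces "x < low || x > high".  */
      int out_of_range
	= (unsigned int) (x - low) > (unsigned int) (high - low);
      printf ("%d -> %s\n", x, out_of_range ? "default label" : "jump table");
    }
  return 0;
}
#endif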
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

#include "gt-expr.h"