/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static int mostly_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
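
/* Illustrative note (not part of the original file): on a 32-bit target
   where MOVE_RATIO is 15 and SImode moves are available, a word-aligned
   16-byte copy costs move_by_pieces_ninsns (16, 32) == 4 insns, so
   MOVE_BY_PIECES_P (16, 32) is true and the copy is expanded inline
   instead of via a movstr pattern or a memcpy call.  */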
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
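
/* Illustrative sketch (not part of the original file): the calling
   discipline the comment above prescribes.  The function and operand
   names are hypothetical.  */
#if 0
static void
example_queue_discipline (rtx dest, rtx src)
{
  /* Launder each operand immediately before emitting the insn; a
     QUEUED rtx must never appear inside an instruction.  */
  dest = protect_from_queue (dest, 1);	/* 1: DEST will be modified.  */
  src = protect_from_queue (src, 0);	/* 0: SRC is only read.  */
  emit_move_insn (dest, src);
  emit_queue ();			/* Flush the pending increments.  */
}
#endif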
rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
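
/* Illustrative sketch (not part of the original file): a typical caller
   widening a QImode value into a fresh SImode pseudo.  */
#if 0
static rtx
example_widen_qi_to_si (rtx src_qi)
{
  rtx dest = gen_reg_rtx (SImode);
  convert_move (dest, src_qi, 1);	/* Nonzero UNSIGNEDP: zero-extend.  */
  return dest;
}
#endif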
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
859 /* Handle expanding beyond a word. */
860 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
861 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
868 enum machine_mode lowpart_mode
;
869 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
871 /* Try converting directly if the insn is supported. */
872 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
875 /* If FROM is a SUBREG, put it into a register. Do this
876 so that we always generate the same set of insns for
877 better cse'ing; if an intermediate assignment occurred,
878 we won't be doing the operation directly on the SUBREG. */
879 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
880 from
= force_reg (from_mode
, from
);
881 emit_unop_insn (code
, to
, from
, equiv_code
);
884 /* Next, try converting via full word. */
885 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
886 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
887 != CODE_FOR_nothing
))
889 if (GET_CODE (to
) == REG
)
890 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
891 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
892 emit_unop_insn (code
, to
,
893 gen_lowpart (word_mode
, to
), equiv_code
);
897 /* No special multiword conversion insn; do it by hand. */
900 /* Since we will turn this into a no conflict block, we must ensure
901 that the source does not overlap the target. */
903 if (reg_overlap_mentioned_p (to
, from
))
904 from
= force_reg (from_mode
, from
);
906 /* Get a copy of FROM widened to a word, if necessary. */
907 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
908 lowpart_mode
= word_mode
;
910 lowpart_mode
= from_mode
;
912 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
914 lowpart
= gen_lowpart (lowpart_mode
, to
);
915 emit_move_insn (lowpart
, lowfrom
);
917 /* Compute the value to put in each remaining word. */
919 fill_value
= const0_rtx
;
924 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
925 && STORE_FLAG_VALUE
== -1)
927 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
929 fill_value
= gen_reg_rtx (word_mode
);
930 emit_insn (gen_slt (fill_value
));
936 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
937 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
939 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
943 /* Fill the remaining words. */
944 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
946 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
947 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
952 if (fill_value
!= subword
)
953 emit_move_insn (subword
, fill_value
);
956 insns
= get_insns ();
959 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
960 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
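
/* Illustrative sketch (not part of the original file): why OLDMODE matters
   for constants.  A CONST_INT carries VOIDmode, so OLDMODE is the only
   record of the constant's original width.  */
#if 0
static rtx
example_widen_constant (void)
{
  /* (const_int -1) viewed as a QImode quantity is 0xff; widening it
     unsigned to SImode must yield 255, not -1, which is exactly what
     the OLDMODE-based masking below arranges.  */
  return convert_modes (SImode, QImode, GEN_INT (-1), 1);
}
#endif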
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode,
	       rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
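
/* Illustrative sketch (not part of the original file): expanding a small
   fixed-size copy inline, assuming TO and FROM have already been passed
   through protect_from_queue as required above.  */
#if 0
static void
example_inline_copy (rtx to, rtx from)
{
  unsigned int align = MIN (MEM_ALIGN (to), MEM_ALIGN (from));
  if (MOVE_BY_PIECES_P (8, align))
    move_by_pieces (to, from, 8, align, 0);	/* ENDP 0: no result.  */
}
#endif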
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
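
/* Illustrative sketch (not part of the original file): the usual entry
   point for expanding a structure assignment.  */
#if 0
static void
example_block_copy (rtx dest, rtx src, HOST_WIDE_INT nbytes)
{
  /* BLOCK_OP_NORMAL permits any strategy: by-pieces, a movstr
     pattern, or a library call.  */
  emit_block_move (dest, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif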
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  if (PUSH_ARGS)
    return true;
  else
    {
      /* Check to see whether memcpy takes all register arguments.  */
      static enum {
	takes_regs_uninit, takes_regs_no, takes_regs_yes
      } takes_regs = takes_regs_uninit;

      switch (takes_regs)
	{
	case takes_regs_uninit:
	  {
	    CUMULATIVE_ARGS args_so_far;
	    tree fn, arg;

	    fn = emit_block_move_libcall_fn (false);
	    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
	      {
		enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
		rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
		if (!tmp || !REG_P (tmp))
		  goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
		if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
						NULL_TREE, 1))
		  goto fail_takes_regs;
#endif
		FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	      }
	  }
	  takes_regs = takes_regs_yes;
	  /* FALLTHRU */

	case takes_regs_yes:
	  return true;

	fail_takes_regs:
	  takes_regs = takes_regs_no;
	  /* FALLTHRU */
	case takes_regs_no:
	  return false;

	default:
	  abort ();
	}
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);
#endif

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
2265 emit_group_load (rtx dst
, rtx orig_src
, int ssize
)
2270 if (GET_CODE (dst
) != PARALLEL
)
2273 /* Check for a NULL entry, used to indicate that the parameter goes
2274 both on the stack and in registers. */
2275 if (XEXP (XVECEXP (dst
, 0, 0), 0))
2280 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
2282 /* Process the pieces. */
2283 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2285 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
2286 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
2287 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2290 /* Handle trailing fragments that run over the size of the struct. */
2291 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2293 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2294 bytelen
= ssize
- bytepos
;
2299 /* If we won't be loading directly from memory, protect the real source
2300 from strange tricks we might play; but make sure that the source can
2301 be loaded directly into the destination. */
2303 if (GET_CODE (orig_src
) != MEM
2304 && (!CONSTANT_P (orig_src
)
2305 || (GET_MODE (orig_src
) != mode
2306 && GET_MODE (orig_src
) != VOIDmode
)))
2308 if (GET_MODE (orig_src
) == VOIDmode
)
2309 src
= gen_reg_rtx (mode
);
2311 src
= gen_reg_rtx (GET_MODE (orig_src
));
2313 emit_move_insn (src
, orig_src
);
2316 /* Optimize the access just a bit. */
2317 if (GET_CODE (src
) == MEM
2318 && MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
)
2319 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2320 && bytelen
== GET_MODE_SIZE (mode
))
2322 tmps
[i
] = gen_reg_rtx (mode
);
2323 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2325 else if (GET_CODE (src
) == CONCAT
)
2327 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
2328 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
2330 if ((bytepos
== 0 && bytelen
== slen0
)
2331 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
2333 /* The following assumes that the concatenated objects all
2334 have the same size. In this case, a simple calculation
2335 can be used to determine the object and the bit field
2337 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
2338 if (! CONSTANT_P (tmps
[i
])
2339 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
2340 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
2341 (bytepos
% slen0
) * BITS_PER_UNIT
,
2342 1, NULL_RTX
, mode
, mode
, ssize
);
2344 else if (bytepos
== 0)
2346 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
2347 emit_move_insn (mem
, src
);
2348 tmps
[i
] = adjust_address (mem
, mode
, 0);
2353 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2354 SIMD register, which is currently broken. While we get GCC
2355 to emit proper RTL for these cases, let's dump to memory. */
2356 else if (VECTOR_MODE_P (GET_MODE (dst
))
2357 && GET_CODE (src
) == REG
)
2359 int slen
= GET_MODE_SIZE (GET_MODE (src
));
2362 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
2363 emit_move_insn (mem
, src
);
2364 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
2366 else if (CONSTANT_P (src
)
2367 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
2370 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2371 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2374 if (BYTES_BIG_ENDIAN
&& shift
)
2375 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2376 tmps
[i
], 0, OPTAB_WIDEN
);
2381 /* Copy the extracted pieces into the proper (probable) hard regs. */
2382 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2383 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
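
/* Typical use, as in the CALL_EXPR handling of expand_assignment further
   down in this file: when a callee returns its value in a PARALLEL, the
   caller moves the value into place with

       emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));

   where TO_RTX is the PARALLEL describing the register pieces and VALUE
   is the rtx holding the returned value.  */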
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (rtx orig_dst, rtx src, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else if (bytepos == 0 && XVECLEN (src, 0))
            {
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
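
/* For example, on a 32-bit big-endian target a 6-byte structure occupies
   one full word plus 2 bytes: bytes % UNITS_PER_WORD == 2, so
   big_endian_correction = 32 - 2 * 8 = 16, and the first 16 bits of the
   second source word are skipped, so that the data the ABI right-justified
   in the register lands left-justified in the target block.  */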
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
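
/* For instance, storing 16 bytes through this routine with SImode moves
   on a target using post-increment addressing boils down to a sequence
   like

       (set (mem:SI (post_inc (reg addr))) (const_int ...))

   repeated four times, with DATA->len stepping down by 4 on each
   iteration.  */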
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size)
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
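
/* Concretely: clearing a DImode object whose SIZE operand matches its
   8-byte mode size becomes a single (set (mem:DI ...) (const_int 0)),
   while a BLKmode object falls through the strategies above in order:
   clear_by_pieces for small constant sizes, then a clrstr pattern, then
   the memset/bzero libcall.  */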
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
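
/* The operand convention assumed above is the one clrstrM patterns use:
   operand 0 is the destination BLKmode MEM, operand 1 the byte count (in
   integer mode M), and operand 2 the known alignment in bytes, which is
   why OBJECT, OP1 and OPALIGN are checked against predicates 0, 1 and 2
   respectively.  */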
/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memset");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           integer_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bzero");
          args = build_function_type_list (void_type_node, ptr_type_node,
                                           unsigned_type_node, NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_clear_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
    }
}
static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn, NULL);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = GET_MODE_INNER (mode))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
      if (stack
          && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
              != GET_MODE_SIZE (submode)))
        {
          rtx temp;
          HOST_WIDE_INT offset1, offset2;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
          offset1 = 0;
          offset2 = GET_MODE_SIZE (submode);
#else
          offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
          offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
                     + GET_MODE_SIZE (submode));
#endif

          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset1))),
                          gen_realpart (submode, y));
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset2))),
                          gen_imagpart (submode, y));
        }
      else
#endif
      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_imagpart (submode, y));
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_realpart (submode, y));
#else
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_realpart (submode, y));
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_imagpart (submode, y));
#endif
        }
      else
        {
          rtx realpart_x, realpart_y;
          rtx imagpart_x, imagpart_y;

          /* If this is a complex value with each part being smaller than a
             word, the usual calling sequence will likely pack the pieces into
             a single register.  Unfortunately, SUBREG of hard registers only
             deals in terms of words, so we have a problem converting input
             arguments to the CONCAT of two registers that is used elsewhere
             for complex values.  If this is before reload, we can copy it into
             memory and reload.  FIXME, we should see about using extract and
             insert on integer registers, but complex short and complex char
             variables should be rarely used.  */
          if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
              && (reload_in_progress | reload_completed) == 0)
            {
              int packed_dest_p
                = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
              int packed_src_p
                = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

              if (packed_dest_p || packed_src_p)
                {
                  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                               ? MODE_FLOAT : MODE_INT);

                  enum machine_mode reg_mode
                    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

                  if (reg_mode != BLKmode)
                    {
                      rtx mem = assign_stack_temp (reg_mode,
                                                   GET_MODE_SIZE (mode), 0);
                      rtx cmem = adjust_address (mem, mode, 0);

                      cfun->cannot_inline
                        = N_("function using short complex types cannot be inline");

                      if (packed_dest_p)
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

                          emit_move_insn_1 (cmem, y);
                          return emit_move_insn_1 (sreg, mem);
                        }
                      else
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

                          emit_move_insn_1 (mem, sreg);
                          return emit_move_insn_1 (x, cmem);
                        }
                    }
                }
            }

          realpart_x = gen_realpart (submode, x);
          realpart_y = gen_realpart (submode, y);
          imagpart_x = gen_imagpart (submode, x);
          imagpart_y = gen_imagpart (submode, y);

          /* Show the output dies here.  This is necessary for SUBREGs
             of pseudos since we cannot track their lifetimes correctly;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed)
              && (GET_CODE (realpart_x) == SUBREG
                  || GET_CODE (imagpart_x) == SUBREG))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

          emit_move_insn (realpart_x, realpart_y);
          emit_move_insn (imagpart_x, imagpart_y);
        }

      return get_last_insn ();
    }

  /* Handle MODE_CC modes:  If we don't have a special move insn for this mode,
     find a mode to do it in.  If we have a movcc, use it.  Otherwise,
     find the MODE_INT mode of the same width.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
           && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    {
      enum insn_code insn_code;
      enum machine_mode tmode = VOIDmode;
      rtx x1 = x, y1 = y;

      if (mode != CCmode
          && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
        tmode = CCmode;
      else
        for (tmode = QImode; tmode != VOIDmode;
             tmode = GET_MODE_WIDER_MODE (tmode))
          if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
            break;

      if (tmode == VOIDmode)
        abort ();

      /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
         may call change_address which is not appropriate if we were
         called when a reload was in progress.  We don't have to worry
         about changing the address since the size in bytes is supposed to
         be the same.  Copy the MEM to change the mode and move any
         substitutions from the old MEM to the new one.  */

      if (reload_in_progress)
        {
          x = gen_lowpart_common (tmode, x1);
          if (x == 0 && GET_CODE (x1) == MEM)
            {
              x = adjust_address_nv (x1, tmode, 0);
              copy_replacements (x1, x);
            }

          y = gen_lowpart_common (tmode, y1);
          if (y == 0 && GET_CODE (y1) == MEM)
            {
              y = adjust_address_nv (y1, tmode, 0);
              copy_replacements (y1, y);
            }
        }
      else
        {
          x = gen_lowpart (tmode, x);
          y = gen_lowpart (tmode, y);
        }

      insn_code = mov_optab->handlers[(int) tmode].insn_code;
      return emit_insn (GEN_FCN (insn_code) (x, y));
    }

  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          rtx temp;
          enum rtx_code code;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

          code = GET_CODE (XEXP (x, 0));

          /* Just hope that small offsets off SP are OK.  */
          if (code == POST_INC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (-((HOST_WIDE_INT)
                                            GET_MODE_SIZE (GET_MODE (x)))));
          else if (code == POST_DEC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          else
            temp = stack_pointer_rtx;

          x = change_address (x, VOIDmode, temp);
        }
#endif

      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = get_insns ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
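
/* Example: moving the DFmode constant 1.0 on a target that can extend
   directly from memory becomes

       (set (reg:DF ...) (float_extend:DF (mem:SF ...)))

   because 1.0 truncates to SFmode exactly.  A constant such as 0.1 is
   not exactly representable in SFmode, so exact_real_truncate rejects
   the narrowing and the move uses the full DFmode constant.  */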
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
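
/* For example, push_block (GEN_INT (16), 0, 0) on a downward-growing
   stack emits anti_adjust_stack (GEN_INT (16)) and returns an address
   based on virtual_outgoing_args_rtx itself; with an upward-growing
   stack, the same call instead returns virtual_outgoing_args_rtx minus
   16, the start of the freshly allocated block.  */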
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    GEN_INT (rounded_size),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
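
/* Worked case for the downward-padding path above: pushing an HImode
   value when PUSH_ROUNDING rounds 2 bytes up to 4 gives padding_size = 2,
   so after the explicit stack adjustment the value is stored at offset 2
   from the new stack pointer (plus rounded_size when STACK_PUSH_CODE is
   POST_DEC), leaving the two pad bytes below the datum as `downward'
   padding requires.  */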
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, int extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          if (type != 0)
            {
              set_mem_attributes (target, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (target, 0);
            }

          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);
          if (type != 0)
            {
              set_mem_attributes (dest, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (dest, 0);
            }

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, -1);  /* ??? size?  */
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
4106 /* Expand an assignment that stores the value of FROM into TO.
4107 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4108 (This may contain a QUEUED rtx;
4109 if the value is constant, this rtx is a constant.)
4110 Otherwise, the returned value is NULL_RTX.
4112 SUGGEST_REG is no longer actually used.
4113 It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (tree to, tree from, int want_value,
                   int suggest_reg ATTRIBUTE_UNUSED)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

          if (GET_CODE (to_rtx) != MEM)
            abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (GET_CODE (to_rtx) == MEM
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_type (TREE_TYPE (to),
                                                                 offset));
        }

      if (GET_CODE (to_rtx) == MEM)
        {
          /* If the field is at offset zero, we could have been given the
             DECL_RTX of the parent struct.  Don't munge it.  */
          to_rtx = shallow_copy_rtx (to_rtx);

          set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
        }

      /* Deal with volatile and readonly fields.  The former is only done
         for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && GET_CODE (to_rtx) == MEM)
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_VOLATILE_P (to_rtx) = 1;
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1)))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast for HPUX compiler.  */
                             ? ((enum machine_mode)
                                TYPE_MODE (TREE_TYPE (to)))
                             : VOIDmode),
                            unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (POINTER_TYPE_P (TREE_TYPE (to))
              && GET_MODE (to_rtx) != GET_MODE (value))
            value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      if (TARGET_MEM_FUNCTIONS)
        emit_library_call (memmove_libfunc, LCT_NORMAL,
                           VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                           XEXP (from_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (sizetype),
                                            size, TREE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));
      else
        emit_library_call (bcopy_libfunc, LCT_NORMAL,
                           VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                           XEXP (to_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (integer_type_node),
                                            size,
                                            TREE_UNSIGNED (integer_type_node)),
                           TYPE_MODE (integer_type_node));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
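
/* For instance (an illustrative C fragment, not from any testcase), for

       struct S { int f; } s;
       s.f = x;

   the front end hands us TO = COMPONENT_REF <s, f> and FROM = x.
   get_inner_reference decomposes TO into the MEM for S plus a constant
   bit position for F, and store_field then emits either a plain move or
   a bit-field insertion, depending on MODE1.  */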
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE & 1 is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE & 1 is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.

   If WANT_VALUE & 2 is set, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */

rtx
store_expr (tree exp, rtx target, int want_value)
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other.  Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
      if (want_value)
        abort ();
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value & 1 ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target),
                              (want_value & 2
                               ? EXPAND_STACK_PARM : EXPAND_NORMAL));
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
                            (want_value & 2
                             ? EXPAND_STACK_PARM : EXPAND_NORMAL));

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
        dont_return_target = 1;
    }
  else if ((want_value & 1) != 0
           && GET_CODE (target) == MEM
           && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target),
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        {
          /* If TEMP is already in the desired TARGET, only copy it from
             memory and don't store it there again.  */
          if (temp == target
              || (rtx_equal_p (temp, target)
                  && ! side_effects_p (temp) && ! side_effects_p (target)))
            dont_store_target = 1;
          temp = copy_to_reg (temp);
        }
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if ((want_value & 1) == 0
          && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0)
        {
          if (TREE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp = convert
              ((*lang_hooks.types.signed_or_unsigned_type)
               (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

          exp = convert ((*lang_hooks.types.type_for_mode)
                         (GET_MODE (SUBREG_REG (target)),
                          SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a MEM and we want a result value, make the access
         now so it gets done only once.  Strictly speaking, this is
         only necessary if the MEM is volatile, or if the address
         overlaps TARGET.  But not performing the load twice also
         reduces the amount of rtl we generate and then have to CSE.  */
      if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
         target.  Otherwise, the caller might get confused by a result whose
         mode is larger than expected.  */

      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
        {
          if (GET_MODE (temp) != VOIDmode)
            {
              temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (temp,
                SUBREG_PROMOTED_UNSIGNED_P (target));
            }
          else
            temp = convert_modes (GET_MODE (target),
                                  GET_MODE (SUBREG_REG (target)),
                                  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      return want_value & 1 ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target),
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || (want_value & 1) != 0))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
         /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
            but TARGET is not valid memory reference, TEMP will differ
            from TARGET although it is really the same location.  */
      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
          || target != DECL_RTL_IF_SET (exp))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
         unless necessary, because some front-ends (C++) expr_size-hook
         aborts on objects that are not supposed to be bit-copied or
         bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (want_value & 2
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (want_value & 2
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
              rtx label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
                                               TREE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (want_value & 2
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */
              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
                  if (GET_MODE (copy_size_rtx) != Pmode)
                    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
                                                     TREE_UNSIGNED (sizetype));
#endif

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size);

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (want_value & 2
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
        emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if ((want_value & 1) == 0)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if ((want_value & 1) != 0
           && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
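
/* A typical use (illustrative): a caller that wants the stored value back,

       temp = store_expr (rhs, target_rtx, 1);

   gets either TARGET itself, a pseudo copied from it, or the constant,
   per the rules above; passing 0 instead returns NULL_RTX and skips the
   extra copy, and bit 2 only changes how block moves are emitted.  */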
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (tree exp)
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
        is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
           elt = TREE_CHAIN (elt))
        if (!is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        if (! is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    default:
      return 0;
    }
}
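
/* For example, the CONSTRUCTOR for "struct { int a; double b; } z
   = { 0, 0.0 };" (illustrative) satisfies is_zeros_p, while any nonzero
   element, however deeply nested, makes it return 0.  */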
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        {
          /* If there are no ranges of true bits, it is all zero.  */
          return elt == NULL_TREE;
        }
      for (; elt; elt = TREE_CHAIN (elt))
        {
          /* We do not handle the case where the index is a RANGE_EXPR,
             so the statistic will be somewhat inaccurate.
             We do make a more accurate count in store_constructor itself,
             so since this function is only used for nested array elements,
             this should be close enough.  */
          if (mostly_zeros_p (TREE_VALUE (elt)))
            zeros++;
          elts++;
        }

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
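
/* For example, a nested initializer { 0, 0, 0, 5 } has ZEROS = 3 and
   ELTS = 4, and 4*3 >= 3*4 holds, so it counts as mostly zero;
   { 0, 5, 6, 7 } has 4*1 < 3*4 and does not.  */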
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
                         tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a nonzero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (GET_CODE (target) == MEM)
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
                 alias_set);
}
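
/* So for a nested aggregate initializer such as { { 1, 2 }, { 3, 4 } }
   (illustrative), each element is itself a CONSTRUCTOR at a byte
   boundary of a MEM target, and we recurse straight into
   store_constructor, carrying CLEARED so an outer clearing pass is not
   repeated for the sub-objects.  */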
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* We either clear the aggregate or indicate the value is dead.  */
      if ((TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && ! cleared
          && ! CONSTRUCTOR_ELTS (exp))
        /* If the constructor is empty, clear the union.  */
        {
          clear_storage (target, expr_size (exp));
          cleared = 1;
        }

      /* If we are building a static constructor into a register,
         set the initial value as zero so we can fold the value into
         a constant.  But if more than one register is involved,
         this probably loses.  */
      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
               && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
        {
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
          cleared = 1;
        }

      /* If the constructor has fewer fields than the structure
         or if we are initializing the structure to mostly zeros,
         clear the whole structure first.  Don't do this if TARGET is a
         register whose mode size isn't equal to SIZE since clear_storage
         can't handle this case.  */
      else if (! cleared && size > 0
               && ((list_length (CONSTRUCTOR_ELTS (exp))
                    != fields_length (type))
                   || mostly_zeros_p (exp))
               && (GET_CODE (target) != REG
                   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
                       == size)))
        {
          rtx xtarget = target;

          if (readonly_fields_p (type))
            {
              xtarget = copy_rtx (xtarget);
              RTX_UNCHANGING_P (xtarget) = 1;
            }

          clear_storage (xtarget, GEN_INT (size));
          cleared = 1;
        }

      if (! cleared)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        {
          tree field = TREE_PURPOSE (elt);
          tree value = TREE_VALUE (elt);
          enum machine_mode mode;
          HOST_WIDE_INT bitsize;
          HOST_WIDE_INT bitpos = 0;
          tree offset;
          rtx to_rtx = target;

          /* Just ignore missing fields.
             We cleared the whole structure, above,
             if any fields are missing.  */
          if (field == 0)
            continue;

          if (cleared && is_zeros_p (value))
            continue;

          if (host_integerp (DECL_SIZE (field), 1))
            bitsize = tree_low_cst (DECL_SIZE (field), 1);
          else
            bitsize = -1;

          mode = DECL_MODE (field);
          if (DECL_BIT_FIELD (field))
            mode = VOIDmode;

          offset = DECL_FIELD_OFFSET (field);
          if (host_integerp (offset, 0)
              && host_integerp (bit_position (field), 0))
            {
              bitpos = int_bit_position (field);
              offset = 0;
            }
          else
            bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

          if (offset)
            {
              rtx offset_rtx;

              if (CONTAINS_PLACEHOLDER_P (offset))
                offset = build (WITH_RECORD_EXPR, sizetype,
                                offset, make_tree (TREE_TYPE (exp), target));

              offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
              if (GET_CODE (to_rtx) != MEM)
                abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (offset_rtx) != Pmode)
                offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
              if (GET_MODE (offset_rtx) != ptr_mode)
                offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

              to_rtx = offset_address (to_rtx, offset_rtx,
                                       highest_pow2_factor (offset));
            }

          if (TREE_READONLY (field))
            {
              if (GET_CODE (to_rtx) == MEM)
                to_rtx = copy_rtx (to_rtx);

              RTX_UNCHANGING_P (to_rtx) = 1;
            }

#ifdef WORD_REGISTER_OPERATIONS
          /* If this initializes a field that is smaller than a word, at the
             start of a word, try to widen it to a full word.
             This special case allows us to output C++ member function
             initializations in a form that the optimizers can understand.  */
          if (GET_CODE (target) == REG
              && bitsize < BITS_PER_WORD
              && bitpos % BITS_PER_WORD == 0
              && GET_MODE_CLASS (mode) == MODE_INT
              && TREE_CODE (value) == INTEGER_CST
              && exp_size >= 0
              && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
            {
              tree type = TREE_TYPE (value);

              if (TYPE_PRECISION (type) < BITS_PER_WORD)
                {
                  type = (*lang_hooks.types.type_for_size)
                    (BITS_PER_WORD, TREE_UNSIGNED (type));
                  value = convert (type, value);
                }

              if (BYTES_BIG_ENDIAN)
                value
                  = fold (build (LSHIFT_EXPR, type, value,
                                 build_int_2 (BITS_PER_WORD - bitsize, 0)));
              bitsize = BITS_PER_WORD;
              mode = word_mode;
            }
#endif

          if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
              && DECL_NONADDRESSABLE_P (field))
            {
              to_rtx = copy_rtx (to_rtx);
              MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
            }

          store_constructor_field (to_rtx, bitsize, bitpos, mode,
                                   value, type, cleared,
                                   get_alias_set (TREE_TYPE (field)));
        }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE
           || TREE_CODE (type) == VECTOR_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      tree elttype = TREE_TYPE (type);
      int const_bounds_p;
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;

      /* Vectors are like arrays, but the domain is stored via an array
         type indirectly.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
        {
          /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
             the same field as TYPE_DOMAIN, we are not guaranteed that
             it always will.  */
          domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
          domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
        }

      const_bounds_p = (TYPE_MIN_VALUE (domain)
                        && TYPE_MAX_VALUE (domain)
                        && host_integerp (TYPE_MIN_VALUE (domain), 0)
                        && host_integerp (TYPE_MAX_VALUE (domain), 0));

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
        {
          minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
          maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
        }

      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is
         static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
        need_to_clear = 1;
      else
        {
          HOST_WIDE_INT count = 0, zero_count = 0;
          need_to_clear = ! const_bounds_p;

          /* This loop is a more accurate version of the loop in
             mostly_zeros_p (it handles RANGE_EXPR in an index).
             It is also needed to check for missing elements.  */
          for (elt = CONSTRUCTOR_ELTS (exp);
               elt != NULL_TREE && ! need_to_clear;
               elt = TREE_CHAIN (elt))
            {
              tree index = TREE_PURPOSE (elt);
              HOST_WIDE_INT this_node_count;

              if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                {
                  tree lo_index = TREE_OPERAND (index, 0);
                  tree hi_index = TREE_OPERAND (index, 1);

                  if (! host_integerp (lo_index, 1)
                      || ! host_integerp (hi_index, 1))
                    {
                      need_to_clear = 1;
                      break;
                    }

                  this_node_count = (tree_low_cst (hi_index, 1)
                                     - tree_low_cst (lo_index, 1) + 1);
                }
              else
                this_node_count = 1;

              count += this_node_count;
              if (mostly_zeros_p (TREE_VALUE (elt)))
                zero_count += this_node_count;
            }

          /* Clear the entire array first if there are any missing elements,
             or if the incidence of zero elements is >= 75%.  */
          if (! need_to_clear
              && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
            need_to_clear = 1;
        }

      if (need_to_clear && size > 0)
        {
          if (! cleared)
            {
              if (REG_P (target))
                emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
              else
                clear_storage (target, GEN_INT (size));
            }
          cleared = 1;
        }
      else if (REG_P (target))
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
           elt;
           elt = TREE_CHAIN (elt), i++)
        {
          enum machine_mode mode;
          HOST_WIDE_INT bitsize;
          HOST_WIDE_INT bitpos;
          int unsignedp;
          tree value = TREE_VALUE (elt);
          tree index = TREE_PURPOSE (elt);
          rtx xtarget = target;

          if (cleared && is_zeros_p (value))
            continue;

          unsignedp = TREE_UNSIGNED (elttype);
          mode = TYPE_MODE (elttype);
          if (mode == BLKmode)
            bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
                       ? tree_low_cst (TYPE_SIZE (elttype), 1)
                       : -1);
          else
            bitsize = GET_MODE_BITSIZE (mode);

          if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
            {
              tree lo_index = TREE_OPERAND (index, 0);
              tree hi_index = TREE_OPERAND (index, 1);
              rtx index_r, pos_rtx, loop_end;
              struct nesting *loop;
              HOST_WIDE_INT lo, hi, count;
              tree position;

              /* If the range is constant and "small", unroll the loop.  */
              if (const_bounds_p
                  && host_integerp (lo_index, 0)
                  && host_integerp (hi_index, 0)
                  && (lo = tree_low_cst (lo_index, 0),
                      hi = tree_low_cst (hi_index, 0),
                      count = hi - lo + 1,
                      (GET_CODE (target) != MEM
                       || count <= 2
                       || (host_integerp (TYPE_SIZE (elttype), 1)
                           && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
                               <= 40 * 8)))))
                {
                  lo -= minelt;  hi -= minelt;
                  for (; lo <= hi; lo++)
                    {
                      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

                      if (GET_CODE (target) == MEM
                          && !MEM_KEEP_ALIAS_SET_P (target)
                          && TREE_CODE (type) == ARRAY_TYPE
                          && TYPE_NONALIASED_COMPONENT (type))
                        {
                          target = copy_rtx (target);
                          MEM_KEEP_ALIAS_SET_P (target) = 1;
                        }

                      store_constructor_field
                        (target, bitsize, bitpos, mode, value, type, cleared,
                         get_alias_set (elttype));
                    }
                }
              else
                {
                  expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
                  loop_end = gen_label_rtx ();

                  unsignedp = TREE_UNSIGNED (domain);

                  index = build_decl (VAR_DECL, NULL_TREE, domain);

                  index_r
                    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                 &unsignedp, 0));
                  SET_DECL_RTL (index, index_r);
                  if (TREE_CODE (value) == SAVE_EXPR
                      && SAVE_EXPR_RTL (value) == 0)
                    {
                      /* Make sure value gets expanded once before the
                         loop.  */
                      expand_expr (value, const0_rtx, VOIDmode, 0);
                      emit_queue ();
                    }
                  store_expr (lo_index, index_r, 0);
                  loop = expand_start_loop (0);

                  /* Assign value to element index.  */
                  position
                    = convert (ssizetype,
                               fold (build (MINUS_EXPR, TREE_TYPE (index),
                                            index, TYPE_MIN_VALUE (domain))));
                  position = size_binop (MULT_EXPR, position,
                                         convert (ssizetype,
                                                  TYPE_SIZE_UNIT (elttype)));

                  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
                  xtarget = offset_address (target, pos_rtx,
                                            highest_pow2_factor (position));
                  xtarget = adjust_address (xtarget, mode, 0);
                  if (TREE_CODE (value) == CONSTRUCTOR)
                    store_constructor (value, xtarget, cleared,
                                       bitsize / BITS_PER_UNIT);
                  else
                    store_expr (value, xtarget, 0);

                  expand_exit_loop_if_false (loop,
                                             build (LT_EXPR, integer_type_node,
                                                    index, hi_index));

                  expand_increment (build (PREINCREMENT_EXPR,
                                           TREE_TYPE (index),
                                           index, integer_one_node), 0, 0);
                  expand_end_loop ();
                  emit_label (loop_end);
                }
            }
          else if ((index != 0 && ! host_integerp (index, 0))
                   || ! host_integerp (TYPE_SIZE (elttype), 1))
            {
              tree position;

              if (index == 0)
                index = ssize_int (1);

              if (minelt)
                index = convert (ssizetype,
                                 fold (build (MINUS_EXPR, index,
                                              TYPE_MIN_VALUE (domain))));

              position = size_binop (MULT_EXPR, index,
                                     convert (ssizetype,
                                              TYPE_SIZE_UNIT (elttype)));
              xtarget = offset_address (target,
                                        expand_expr (position, 0, VOIDmode, 0),
                                        highest_pow2_factor (position));
              xtarget = adjust_address (xtarget, mode, 0);
              store_expr (value, xtarget, 0);
            }
          else
            {
              if (index != 0)
                bitpos = ((tree_low_cst (index, 0) - minelt)
                          * tree_low_cst (TYPE_SIZE (elttype), 1));
              else
                bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

              if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
                  && TREE_CODE (type) == ARRAY_TYPE
                  && TYPE_NONALIASED_COMPONENT (type))
                {
                  target = copy_rtx (target);
                  MEM_KEEP_ALIAS_SET_P (target) = 1;
                }

              store_constructor_field (target, bitsize, bitpos, mode, value,
                                       type, cleared, get_alias_set (elttype));
            }
        }
    }

  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the whole set (using bzero/memset), and
         then set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
        {
          if (!cleared)
            clear_storage (target, GEN_INT (size));
          return;
        }

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
                              size_diffop (domain_max, domain_min),
                              ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
         are "complicated" (more than one range), initialize (the
         constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
          || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
        {
          unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
          enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
          char *bit_buffer = (char *) alloca (nbits);
          HOST_WIDE_INT word = 0;
          unsigned int bit_pos = 0;
          unsigned int ibit = 0;
          unsigned int offset = 0;  /* In bytes from beginning of set.  */

          elt = get_set_constructor_bits (exp, bit_buffer, nbits);
          for (;;)
            {
              if (bit_buffer[ibit])
                {
                  if (BYTES_BIG_ENDIAN)
                    word |= (1 << (set_word_size - 1 - bit_pos));
                  else
                    word |= 1 << bit_pos;
                }

              bit_pos++;  ibit++;
              if (bit_pos >= set_word_size || ibit == nbits)
                {
                  if (word != 0 || ! cleared)
                    {
                      rtx datum = GEN_INT (word);
                      rtx to_rtx;

                      /* The assumption here is that it is safe to use
                         XEXP if the set is multi-word, but not if
                         it's single-word.  */
                      if (GET_CODE (target) == MEM)
                        to_rtx = adjust_address (target, mode, offset);
                      else if (offset == 0)
                        to_rtx = target;
                      else
                        abort ();
                      emit_move_insn (to_rtx, datum);
                    }

                  if (ibit == nbits)
                    break;
                  word = 0;
                  bit_pos = 0;
                  offset += set_word_size / BITS_PER_UNIT;
                }
            }
        }
      else if (!cleared)
        /* Don't bother clearing storage if the set is all ones.  */
        if (TREE_CHAIN (elt) != NULL_TREE
            || (TREE_PURPOSE (elt) == NULL_TREE
                ? nbits != 1
                : ( ! host_integerp (TREE_VALUE (elt), 0)
                   || ! host_integerp (TREE_PURPOSE (elt), 0)
                   || (tree_low_cst (TREE_VALUE (elt), 0)
                       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
                       != (HOST_WIDE_INT) nbits))))
          clear_storage (target, expr_size (exp));

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
        {
          /* Start of range of element or NULL.  */
          tree startbit = TREE_PURPOSE (elt);
          /* End of range of element, or element value.  */
          tree endbit = TREE_VALUE (elt);
          HOST_WIDE_INT startb, endb;
          rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

          bitlength_rtx = expand_expr (bitlength,
                                       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

          /* Handle non-range tuple element like [ expr ].  */
          if (startbit == NULL_TREE)
            {
              startbit = save_expr (endbit);
              endbit = startbit;
            }

          startbit = convert (sizetype, startbit);
          endbit = convert (sizetype, endbit);
          if (! integer_zerop (domain_min))
            {
              startbit = size_binop (MINUS_EXPR, startbit, domain_min);
              endbit = size_binop (MINUS_EXPR, endbit, domain_min);
            }
          startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
                                      EXPAND_CONST_ADDRESS);
          endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
                                    EXPAND_CONST_ADDRESS);

          if (REG_P (target))
            {
              targetx
                = assign_temp
                  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
                                          (GET_MODE (target), 0),
                                          TYPE_QUAL_CONST)),
                   0, 1, 1);
              emit_move_insn (targetx, target);
            }

          else if (GET_CODE (target) == MEM)
            targetx = target;
          else
            abort ();

          /* Optimization:  If startbit and endbit are constants divisible
             by BITS_PER_UNIT, call memset instead.  */
          if (TARGET_MEM_FUNCTIONS
              && TREE_CODE (startbit) == INTEGER_CST
              && TREE_CODE (endbit) == INTEGER_CST
              && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
              && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
            {
              emit_library_call (memset_libfunc, LCT_NORMAL,
                                 VOIDmode, 3,
                                 plus_constant (XEXP (targetx, 0),
                                                startb / BITS_PER_UNIT),
                                 Pmode,
                                 constm1_rtx, TYPE_MODE (integer_type_node),
                                 GEN_INT ((endb - startb) / BITS_PER_UNIT),
                                 TYPE_MODE (sizetype));
            }
          else
            emit_library_call (setbits_libfunc, LCT_NORMAL,
                               VOIDmode, 4, XEXP (targetx, 0),
                               Pmode, bitlength_rtx, TYPE_MODE (sizetype),
                               startbit_rtx, TYPE_MODE (sizetype),
                               endbit_rtx, TYPE_MODE (sizetype));

          if (REG_P (target))
            emit_move_insn (target, targetx);
        }
    }

  else
    abort ();
}
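
/* The clearing heuristic above means that, for instance, a ten-field
   struct initialized with { .a = 1 } (illustrative) has fewer
   constructor elements than fields, so the whole object is cleared
   first and only field A is then stored explicitly.  */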
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
             enum machine_mode mode, tree exp, enum machine_mode value_mode,
             int unsignedp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
                   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
        abort ();
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (mode != BLKmode
          && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
                || bitpos % GET_MODE_ALIGNMENT (mode))
               && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
              || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
         RHS isn't the same size as the bitfield, we must use bitfield
         operations.  */
      || (bitsize >= 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
          && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
                                       - bitsize),
                             NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
         must be in memory and BITPOS must be aligned on a byte
         boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
        {
          if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
              || bitpos % BITS_PER_UNIT != 0)
            abort ();

          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           BLOCK_OP_NORMAL);

          return value_mode == VOIDmode ? const0_rtx : target;
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp,
                       int_size_in_bytes (type));

      if (value_mode != VOIDmode)
        {
          /* The caller wants an rtx for the value.
             If possible, avoid refetching from the bitfield itself.  */
          if (width_mask != 0
              && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
            {
              tree count;
              enum machine_mode tmode;

              tmode = GET_MODE (temp);
              if (tmode == VOIDmode)
                tmode = value_mode;

              if (unsignedp)
                return expand_and (tmode, temp,
                                   gen_int_mode (width_mask, tmode),
                                   NULL_RTX);

              count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
              temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
              return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
            }

          return extract_bit_field (target, bitsize, bitpos, unsignedp,
                                    NULL_RTX, value_mode, VOIDmode,
                                    int_size_in_bytes (type));
        }
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;

      /* If a value is wanted, it must be the lhs;
         so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
          && ! CONSTANT_ADDRESS_P (addr)
          /* A frame-pointer reference is already stable.  */
          && ! (GET_CODE (addr) == PLUS
                && GET_CODE (XEXP (addr, 1)) == CONST_INT
                && (XEXP (addr, 0) == virtual_incoming_args_rtx
                    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
        to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));

      /* Now build a reference to just the desired component.  */

      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
        to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
        set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
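
/* For instance, storing into a declaration such as "int b : 3;" at bit
   offset 5 (illustrative) arrives here with MODE == VOIDmode, takes the
   store_bit_field path above, and, when VALUE_MODE asks for the value
   back, returns TEMP masked down with WIDTH_MASK == 7 (or sign-extended
   by the shift pair) instead of refetching the bit-field.  */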
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
                     HOST_WIDE_INT *pbitpos, tree *poffset,
                     enum machine_mode *pmode, int *punsignedp,
                     int *pvolatilep)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree placeholder_ptr = 0;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
        mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
        *pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
        bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (exp, 1);
          tree this_offset = DECL_FIELD_OFFSET (field);

          /* If this field hasn't been filled in yet, don't go
             past it.  This should only happen when folding expressions
             made during type construction.  */
          if (this_offset == 0)
            break;
          else if (CONTAINS_PLACEHOLDER_P (this_offset))
            this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);

          offset = size_binop (PLUS_EXPR, offset, this_offset);
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                   DECL_FIELD_BIT_OFFSET (field));

          /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
        }

      else if (TREE_CODE (exp) == ARRAY_REF
               || TREE_CODE (exp) == ARRAY_RANGE_REF)
        {
          tree index = TREE_OPERAND (exp, 1);
          tree array = TREE_OPERAND (exp, 0);
          tree domain = TYPE_DOMAIN (TREE_TYPE (array));
          tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

          /* We assume all arrays have sizes that are a multiple of a byte.
             First subtract the lower bound, if any, in the type of the
             index, then convert to sizetype and multiply by the size of the
             array element.  */
          if (low_bound != 0 && ! integer_zerop (low_bound))
            index = fold (build (MINUS_EXPR, TREE_TYPE (index),
                                 index, low_bound));

          /* If the index has a self-referential type, pass it to a
             WITH_RECORD_EXPR; if the component size is, pass our
             component to one.  */
          if (CONTAINS_PLACEHOLDER_P (index))
            index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
          if (CONTAINS_PLACEHOLDER_P (unit_size))
            unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);

          offset = size_binop (PLUS_EXPR, offset,
                               size_binop (MULT_EXPR,
                                           convert (sizetype, index),
                                           unit_size));
        }

      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
        {
          tree new = find_placeholder (exp, &placeholder_ptr);

          /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
             We might have been called from tree optimization where we
             haven't set up an object yet.  */
          if (new == 0)
            break;
          else
            exp = new;

          continue;
        }

      /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
         conversions that don't change the mode, and all view conversions
         except those that need to "step up" the alignment.  */
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
               && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
                     && ! ((TYPE_ALIGN (TREE_TYPE (exp))
                            > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
                           && STRICT_ALIGNMENT
                           && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                               < BIGGEST_ALIGNMENT)
                           && (TYPE_ALIGN_OK (TREE_TYPE (exp))
                               || TYPE_ALIGN_OK (TREE_TYPE
                                                 (TREE_OPERAND (exp, 0))))))
               && ! ((TREE_CODE (exp) == NOP_EXPR
                      || TREE_CODE (exp) == CONVERT_EXPR)
                     && (TYPE_MODE (TREE_TYPE (exp))
                         == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
                                 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
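
/* For example (illustrative), for the reference a.b[i].c this returns
   the VAR_DECL for A, accumulating the constant parts of the position
   of C into *PBITPOS and leaving the variable part, roughly
   i * sizeof (element) scaled into units, in *POFFSET.  */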
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

      /* ??? Sure they are handled, but get_inner_reference may return
         a different PBITSIZE, depending upon whether the expression is
         wrapped up in a NOP_EXPR or not, e.g. for bitfields.  */
    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
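
/* Callers typically use this to strip a whole reference chain, along
   the lines of (illustrative):

       while (handled_component_p (t))
         t = TREE_OPERAND (t, 0);  */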
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
        subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
        target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
                    code == ZERO_EXTEND);
      return target;
    }

  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
        {
          code = PLUS;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_simple_binop (GET_MODE (value), code,
                                          XEXP (XEXP (value, 0), 0), op2,
                                          subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_simple_binop (GET_MODE (value), code, temp,
                                      force_operand (XEXP (XEXP (value,
                                                                 0), 1), 0),
                                      target, 0, OPTAB_LIB_WIDEN);
        }

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
        {
        case MULT:
          return expand_mult (GET_MODE (value), op1, op2, target, 1);
        case DIV:
          if (!INTEGRAL_MODE_P (GET_MODE (value)))
            return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                        target, 1, OPTAB_LIB_WIDEN);
          else
            return expand_divmod (0,
                                  FLOAT_MODE_P (GET_MODE (value))
                                  ? RDIV_EXPR : TRUNC_DIV_EXPR,
                                  GET_MODE (value), op1, op2, target, 0);
        case MOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 0);
        case UDIV:
          return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case UMOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case ASHIFTRT:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 0, OPTAB_LIB_WIDEN);
        default:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 1, OPTAB_LIB_WIDEN);
        }
    }
  if (GET_RTX_CLASS (code) == '1')
    {
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to
     be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
      && (GET_MODE_SIZE (GET_MODE (value))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
                             force_reg (GET_MODE (SUBREG_REG (value)),
                                        force_operand (SUBREG_REG (value),
                                                       NULL_RTX)),
                             GET_MODE (SUBREG_REG (value)),
                             SUBREG_BYTE (value));
#endif

  return value;
}
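
/* For example (illustrative rtl), passing (plus:SI (reg:SI 60)
   (const_int 4)) emits an add and returns the pseudo holding the sum,
   while a bare (reg:SI 60) falls through all the checks above and is
   returned unchanged.  */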
6049 /* Subroutine of expand_expr: return nonzero iff there is no way that
6050 EXP can reference X, which is being modified. TOP_P is nonzero if this
6051 call is going to be used to determine whether we need a temporary
6052 for EXP, as opposed to a recursive call to this function.
6054 It is always safe for this routine to return zero since it merely
6055 searches for optimization opportunities. */
static int
safe_from_p (rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (GET_CODE (x) == MEM
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */

  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case '2':
    case '<':
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || GET_CODE (DECL_RTL (exp)) != MEM)
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || GET_CODE (x) == MEM)
	    return 0;
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* If we've already scanned this, don't do it again.  Otherwise,
	     show we've scanned it and record for clearing the flag if we're
	     going on.  */
	  if (TREE_PRIVATE (exp))
	    return 1;

	  TREE_PRIVATE (exp) = 1;
	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    {
	      TREE_PRIVATE (exp) = 0;
	      return 0;
	    }

	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !(*lang_hooks.safe_from_p) (x, exp))
	return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (tree exp)
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case VAR_DECL:
    case PARM_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (tree exp)
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_TYPE_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.   We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */
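/* Worked example (illustrative, following the cases below): for an
   expression like I * 12 + 16, the MULT_EXPR contributes 1 * 4 = 4
   (4 being the largest power of two dividing 12), the INTEGER_CST 16
   contributes 16, and the PLUS_EXPR takes MIN (4, 16) = 4.  */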
static unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR: case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (tree type, tree exp)
{
  unsigned HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
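/* Illustrative example: if EXP is the constant 2 but TYPE is aligned to
   4 bytes, the result is MAX (2, 4) = 4, since the expression is
   independently known to be a multiple of the type's alignment.  */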
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */
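/* A typical call, as in expand_expr's PLACEHOLDER_EXPR case below:

     tree placeholder_expr = 0;
     exp = find_placeholder (exp, &placeholder_expr);

   which scans the entire placeholder_list and reports through the second
   argument where the matching object was found.  */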
tree
find_placeholder (tree exp, tree *plist)
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
	       || TREE_CODE (elt) == COND_EXPR)
	      ? TREE_OPERAND (elt, 1)
	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
	      ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.  */
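/* A common call pattern (illustrative only) is

     temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   i.e. no suggested target, no suggested mode, and EXPAND_NORMAL
   (which is 0); the caller must then cope with whatever rtx comes back,
   per the rules above.  */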
rtx
expand_expr (tree exp, rtx target, enum machine_mode tmode,
	     enum expand_modifier modifier)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      else
	return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	/* Labels in containing functions, or labels used from initializers,
	   must be forced.  */
	if (modifier == EXPAND_INITIALIZER
	    || (function != current_function_decl
		&& function != inline_function_decl
		&& function != 0))
	  temp = force_label_rtx (exp);
	else
	  temp = label_rtx (exp);

	temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  (*lang_hooks.mark_addressable) (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = replace_equiv_address (DECL_RTL (exp),
				      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return DECL_RTL (exp);

    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      return const_vector_from_tree (exp);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = output_constant_def (exp, 1);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (temp, 0))
	      || flag_force_addr))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;

    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	location_t saved_loc = input_location;

	input_filename = EXPR_WFL_FILENAME (exp);
	input_line = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, input_line);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_location = saved_loc;
	return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp, /*rescan=*/true);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return
	    replace_equiv_address (temp,
				   fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	      promote_mode (type, mode, &unsignedp, 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);

    case UNSAVE_EXPR:
      {
	rtx temp;

	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0)
	  = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
	return temp;
      }

    case PLACEHOLDER_EXPR:
      {
	tree old_list = placeholder_list;
	tree placeholder_expr = 0;

	exp = find_placeholder (exp, &placeholder_expr);
	if (exp == 0)
	  abort ();

	placeholder_list = TREE_CHAIN (placeholder_expr);
	temp = expand_expr (exp, original_target, tmode, modifier);
	placeholder_list = old_list;
	return temp;
      }

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
			    modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      expand_decl (vars);
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insn (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ((TREE_CODE (type) == VECTOR_TYPE
			     && !is_zeros_p (exp))
			    || ! mostly_zeros_p (exp)))))
	       || ((modifier == EXPAND_INITIALIZER
		    || modifier == EXPAND_CONST_ADDRESS)
		   && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_WRITE)
	  return gen_int_mode (TREE_STRING_POINTER (string)
			       [TREE_INT_CST_LOW (index)], mode);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
	HOST_WIDE_INT i;

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */
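	/* For instance, "foo"[2] is folded here to the character
	   constant 'o'.  */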
	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return gen_int_mode (TREE_STRING_POINTER (array)
			       [TREE_INT_CST_LOW (index)], mode);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;

    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;

    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to suffice.  This occurs in unchecked conversion in Ada.  */

	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    /* If this object is in a register, put it into memory.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		/* If the operand is a SAVE_EXPR, we can deal with this by
		   forcing the SAVE_EXPR into memory.  */
		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		  {
		    put_var_into_stack (TREE_OPERAND (exp, 0),
					/*rescan=*/true);
		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		  }
		else
		  {
		    tree nt
		      = build_qualified_type (TREE_TYPE (tem),
					      (TYPE_QUALS (TREE_TYPE (tem))
					       | TYPE_QUAL_CONST));
		    rtx memloc = assign_temp (nt, 1, 1, 1);

		    emit_move_insn (memloc, op0);
		    op0 = memloc;
		  }
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    /* A constant address in OP0 can have VOIDmode, we must not try
	       to call force_reg for that case.  Avoid that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one element arrays having the same mode as its element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
		     && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && GET_CODE (op0) == MEM
		      && GET_CODE (target) == MEM
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_temp (build_qualified_type
				       ((*lang_hooks.types.type_for_mode)
					(ext_mode, 0),
					TYPE_QUAL_CONST), 0, 1, 1);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

    case VTABLE_REF:
      {
	rtx insn, before = get_last_insn (), vtbl_ref;

	/* Evaluate the interior expression.  */
	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
				 tmode, modifier);

	/* Get or create an instruction off which to hang a note.  */
	if (REG_P (subtarget))
	  {
	    target = subtarget;
	    insn = get_last_insn ();
	    if (insn == before)
	      abort ();
	    if (! INSN_P (insn))
	      insn = prev_nonnote_insn (insn);
	  }
	else
	  {
	    target = gen_reg_rtx (GET_MODE (subtarget));
	    insn = emit_move_insn (target, subtarget);
	  }

	/* Collect the data for the note.  */
	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
	vtbl_ref = plus_constant (vtbl_ref,
				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
	/* Discard the initial CONST that was added.  */
	vtbl_ref = XEXP (vtbl_ref, 0);

	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

	return target;
      }

      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
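	/* A plain C sketch of the same algorithm (hypothetical helper, not
	   part of the compiler; note that the expansion below works with
	   BITS_PER_UNIT-sized pieces rather than whole words):

	     int in_set_p (const unsigned char *set, int set_low, int index)
	     {
	       int rlo = set_low - (set_low % 8);
	       unsigned char the_word = set[(index - rlo) / 8];
	       return (the_word >> (index % 8)) & 1;
	     }  */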
7708 tree set
= TREE_OPERAND (exp
, 0);
7709 tree index
= TREE_OPERAND (exp
, 1);
7710 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7711 tree set_type
= TREE_TYPE (set
);
7712 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7713 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7714 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7715 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7716 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7717 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7718 rtx setaddr
= XEXP (setval
, 0);
7719 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7721 rtx diff
, quo
, rem
, addr
, bit
, result
;
7723 /* If domain is empty, answer is no. Likewise if index is constant
7724 and out of bounds. */
7725 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7726 && TREE_CODE (set_low_bound
) == INTEGER_CST
7727 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7728 || (TREE_CODE (index
) == INTEGER_CST
7729 && TREE_CODE (set_low_bound
) == INTEGER_CST
7730 && tree_int_cst_lt (index
, set_low_bound
))
7731 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7732 && TREE_CODE (index
) == INTEGER_CST
7733 && tree_int_cst_lt (set_high_bound
, index
))))
7737 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7739 /* If we get here, we have to generate the code for both cases
7740 (in range and out of range). */
7742 op0
= gen_label_rtx ();
7743 op1
= gen_label_rtx ();
7745 if (! (GET_CODE (index_val
) == CONST_INT
7746 && GET_CODE (lo_r
) == CONST_INT
))
7747 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7748 GET_MODE (index_val
), iunsignedp
, op1
);
7750 if (! (GET_CODE (index_val
) == CONST_INT
7751 && GET_CODE (hi_r
) == CONST_INT
))
7752 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7753 GET_MODE (index_val
), iunsignedp
, op1
);
7755 /* Calculate the element number of bit zero in the first word
7757 if (GET_CODE (lo_r
) == CONST_INT
)
7758 rlow
= GEN_INT (INTVAL (lo_r
)
7759 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7761 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7762 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7763 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7765 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7766 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7768 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7769 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7770 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7771 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7773 addr
= memory_address (byte_mode
,
7774 expand_binop (index_mode
, add_optab
, diff
,
7775 setaddr
, NULL_RTX
, iunsignedp
,
7778 /* Extract the bit we want to examine. */
7779 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7780 gen_rtx_MEM (byte_mode
, addr
),
7781 make_tree (TREE_TYPE (index
), rem
),
7783 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7784 GET_MODE (target
) == byte_mode
? target
: 0,
7785 1, OPTAB_LIB_WIDEN
);
7787 if (result
!= target
)
7788 convert_move (target
, result
, 1);
7790 /* Output the code to handle the out-of-range case. */
7793 emit_move_insn (target
, const0_rtx
);
7798 case WITH_CLEANUP_EXPR
:
7799 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7801 WITH_CLEANUP_EXPR_RTL (exp
)
7802 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7803 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7804 CLEANUP_EH_ONLY (exp
));
7806 /* That's it for this cleanup. */
7807 TREE_OPERAND (exp
, 1) = 0;
7809 return WITH_CLEANUP_EXPR_RTL (exp
);
7811 case CLEANUP_POINT_EXPR
:
7813 /* Start a new binding layer that will keep track of all cleanup
7814 actions to be performed. */
7815 expand_start_bindings (2);
7817 target_temp_slot_level
= temp_slot_level
;
7819 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7820 /* If we're going to use this value, load it up now. */
7822 op0
= force_not_mem (op0
);
7823 preserve_temp_slots (op0
);
7824 expand_end_bindings (NULL_TREE
, 0, 0);
7829 /* Check for a built-in function. */
7830 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7831 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7833 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7835 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7836 == BUILT_IN_FRONTEND
)
7837 return (*lang_hooks
.expand_expr
) (exp
, original_target
,
7840 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7843 return expand_call (exp
, target
, ignore
);
7845 case NON_LVALUE_EXPR
:
7848 case REFERENCE_EXPR
:
7849 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7852 if (TREE_CODE (type
) == UNION_TYPE
)
7854 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7856 /* If both input and output are BLKmode, this conversion isn't doing
7857 anything except possibly changing memory attribute. */
7858 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7860 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7863 result
= copy_rtx (result
);
7864 set_mem_attributes (result
, exp
, 0);
7869 target
= assign_temp (type
, 0, 1, 1);
7871 if (GET_CODE (target
) == MEM
)
7872 /* Store data into beginning of memory target. */
7873 store_expr (TREE_OPERAND (exp
, 0),
7874 adjust_address (target
, TYPE_MODE (valtype
), 0),
7875 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7877 else if (GET_CODE (target
) == REG
)
7878 /* Store this field into a union of the proper type. */
7879 store_field (target
,
7880 MIN ((int_size_in_bytes (TREE_TYPE
7881 (TREE_OPERAND (exp
, 0)))
7883 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7884 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7885 VOIDmode
, 0, type
, 0);
7889 /* Return the entire union. */
7893 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7895 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7898 /* If the signedness of the conversion differs and OP0 is
7899 a promoted SUBREG, clear that indication since we now
7900 have to do the proper extension. */
7901 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7902 && GET_CODE (op0
) == SUBREG
)
7903 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7908 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7909 if (GET_MODE (op0
) == mode
)
7912 /* If OP0 is a constant, just convert it into the proper mode. */
7913 if (CONSTANT_P (op0
))
7915 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7916 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7918 if (modifier
== EXPAND_INITIALIZER
)
7919 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7920 subreg_lowpart_offset (mode
,
7923 return convert_modes (mode
, inner_mode
, op0
,
7924 TREE_UNSIGNED (inner_type
));
7927 if (modifier
== EXPAND_INITIALIZER
)
7928 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7932 convert_to_mode (mode
, op0
,
7933 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7935 convert_move (target
, op0
,
7936 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are integral and within
	 a word, we can use gen_lowpart.  If neither is true, make sure the
	 operand is in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
	       && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (GET_CODE (op0) == MEM)
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      if (TREE_ADDRESSABLE (exp))
		abort ();

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;
    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
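
      /* Added example (not in the original source): for the tree
	 (PLUS_EXPR (PLUS_EXPR X 4) SP), the swap above yields
	 (PLUS_EXPR (PLUS_EXPR SP 4) X), so the stack pointer is combined
	 with the constant first and pointer elimination sees SP+4
	 directly.  */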
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  if (! operand_equal_p (TREE_OPERAND (exp, 0),
				 TREE_OPERAND (exp, 1), 0))
	    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
	  else
	    op1 = op0;
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      if (! operand_equal_p (TREE_OPERAND (exp, 0),
			     TREE_OPERAND (exp, 1), 0))
	op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
			   VOIDmode, modifier);
      else
	op1 = op0;
    both_summands:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */

      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}

      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  goto both_summands;
	}

      goto binop2;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* If we knew for certain that this is arithmetic for an array
	     reference, and we knew the bounds of the array, then we could
	     apply the distributive law across (PLUS X C) for constant C.
	     Without such knowledge, we risk overflowing the computation
	     when both X and C are large, but X+C isn't.  */
	  /* ??? Could perhaps special-case EXP being unsigned and C being
	     positive.  In that case we are certain that X+C is no smaller
	     than X and so the transformed expression will overflow iff the
	     original would have.  */

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1))));
	}
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab
	    = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       ? smul_widen_optab : umul_widen_optab);
	  this_optab
	    = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       ? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;

		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (! operand_equal_p (TREE_OPERAND (exp, 0),
			     TREE_OPERAND (exp, 1), 0))
	op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      else
	op1 = op0;
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
	 saving the expensive divide.  If not, combine will rebuild the
	 original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;
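
      /* Added example: under -funsafe-math-optimizations, x/y becomes
	 x * (1/y); if several divisions by the same y occur, CSE can
	 reuse the single reciprocal, trading repeated divides for one
	 divide and cheap multiplies.  */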
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS (mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CLZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, clz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CTZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ctz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case POPCOUNT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, popcount_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case PARITY_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, parity_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
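
      /* Added example: for c = p && q with p and q already 0-or-1 and
	 free of side effects, this path evaluates both operands and
	 emits one AND, whereas TRUTH_ANDIF_EXPR would branch around the
	 evaluation of q.  */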
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
				target, tmode, modifier);
	}
      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

	    if (modifier == EXPAND_STACK_PARM)
	      target = 0;
	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }
	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (modifier == EXPAND_STACK_PARM)
	  temp = assign_temp (type, 0, 0, 1);
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && (GET_CODE (original_target) != MEM
		     || TREE_ADDRESSABLE (type)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    tree cond;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? addv_optab : add_optab)
			    : TREE_CODE (binary_op) == MINUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      cond = invert_truthvalue (TREE_OPERAND (exp, 0));
	    else
	      cond = TREE_OPERAND (exp, 0);

	    result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
					   ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	  }
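
	/* Added worked example: for y = x ? a + 4 : a with BRANCH_COST
	   >= 3, this emits  t = (x != 0);  t <<= 2;  y = a + t;
	   replacing the conditional branch with a store-flag and a
	   shift.  */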
	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp,
			    modifier == EXPAND_STACK_PARM ? 2 : 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns.  For
	       example A ? throw : E  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }
    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which lays down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	if (target == 0)
	  {
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* We have already expanded the slot, so don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		SET_DECL_RTL (slot, target);
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot, /*rescan=*/false);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2)
		    = (*lang_hooks.maybe_build_cleanup) (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* We have already expanded the slot, so don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		SET_DECL_RTL (slot, target);
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot, /*rescan=*/true);
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);

	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

	return target;
      }
    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy and that prevents a partial-subsumption
	   which makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = 0;

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }
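
      /* Added example: for s.a |= s.b with one-bit fields and the result
	 unused, the code above emits the equivalent of "if (s.b) s.a = 1;",
	 a test and a constant store, instead of a read-modify-write of
	 the containing word.  */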
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      /* If we are taking the address of a constant and are at the
	 top level, we have to use output_constant_def since we can't
	 call force_const_mem at top level.  */
      else if (cfun == 0
	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
		       == 'c')))
	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	     clever and returns a REG when given a MEM.  */
	  op0 = protect_from_queue (op0, 1);

	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		   || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
	    {
	      /* If the operand is a SAVE_EXPR, we can deal with this by
		 forcing the SAVE_EXPR into memory.  */
	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		{
		  put_var_into_stack (TREE_OPERAND (exp, 0),
				      /*rescan=*/true);
		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		}
	      else
		{
		  /* If this object is in a register, it can't be BLKmode.  */
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  rtx memloc = assign_temp (inner_type, 1, 1, 1);

		  if (GET_CODE (op0) == PARALLEL)
		    /* Handle calls that pass values in multiple
		       non-contiguous locations.  The Irix 6 ABI has examples
		       of this.  */
		    emit_group_store (memloc, op0,
				      int_size_in_bytes (inner_type));
		  else
		    emit_move_insn (memloc, op0);

		  op0 = memloc;
		}
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  mark_temp_addr_taken (op0);
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
		  && mode == ptr_mode)
		op0 = convert_memory_address (ptr_mode, op0);
#endif
	      return op0;
	    }
	  /* If OP0 is not aligned as least as much as the type requires, we
	     need to make a temporary, copy OP0 to it, and take the address of
	     the temporary.  We want to use the alignment of the type, not of
	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	     the test for BLKmode means that can't happen.  The test for
	     BLKmode is because we never make mis-aligned MEMs with
	     non-BLKmode.

	     We don't need to do this at all if the machine doesn't have
	     strict alignment.  */
	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  > MEM_ALIGN (op0))
	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx new;

	      if (TYPE_ALIGN_OK (inner_type))
		abort ();

	      if (TREE_ADDRESSABLE (inner_type))
		{
		  /* We can't make a bitwise copy of this object, so fail.  */
		  error ("cannot take the address of an unaligned member");
		  return const0_rtx;
		}

	      new = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		 : int_size_in_bytes (inner_type),
		 1, build_qualified_type (inner_type,
					  (TYPE_QUALS (inner_type)
					   | TYPE_QUAL_CONST)));

	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
			       (modifier == EXPAND_STACK_PARM
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      op0 = new;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr
	  && GET_CODE (op0) != REG
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }
    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode,
			    ! unsignedp && flag_trapv
			    && (GET_MODE_CLASS (partmode) == MODE_INT)
			    ? negv_optab : neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }
    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	expand_eh_region_end_cleanup (handler);

	return op0;
      }
    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);

	if (!optimize || unsafe_for_reeval (finally_block) > 1)
	  {
	    /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
	       is not sufficient, so we cannot expand the block twice.
	       So we play games with GOTO_SUBROUTINE_EXPR to let us
	       expand the thing only once.  */
	    /* When not optimizing, we go ahead with this form since
	       (1) user breakpoints operate more predictably without
		   code duplication, and
	       (2) we're not running any of the global optimizers
		   that would explode in time/space with the highly
		   connected CFG created by the indirect branching.  */

	    rtx finally_label = gen_label_rtx ();
	    rtx done_label = gen_label_rtx ();
	    rtx return_link = gen_reg_rtx (Pmode);
	    tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
				  (tree) finally_label, (tree) return_link);
	    TREE_SIDE_EFFECTS (cleanup) = 1;

	    /* Start a new binding layer that will keep track of all cleanup
	       actions to be performed.  */
	    expand_start_bindings (2);
	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, cleanup);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	    emit_jump (done_label);
	    emit_label (finally_label);
	    expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	    emit_indirect_jump (return_link);
	    emit_label (done_label);
	  }
	else
	  {
	    expand_start_bindings (2);
	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, finally_block);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	  }

	return op0;
      }
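
      /* Added rough control-flow sketch of the non-optimizing branch:
	     <try block>
	     return_link = <label after the subroutine call>; goto finally;
	   finally:
	     <finally block>
	     goto *return_link;
	   done:
	 so FINALLY_BLOCK is emitted once and reentered indirectly from
	 every exit of the try block.  */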
    case GOTO_SUBROUTINE_EXPR:
      {
	rtx subr = (rtx) TREE_OPERAND (exp, 0);
	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	rtx return_address = gen_label_rtx ();

	emit_move_insn (return_link,
			gen_rtx_LABEL_REF (Pmode, return_address));
	emit_jump (subr);
	emit_label (return_address);
	return const0_rtx;
      }

    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);
    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      abort ();

    default:
      return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
    }
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR
	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
	  && (TREE_OPERAND (offset, 0) == exp
	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
		      == TREE_TYPE (exp)))));
}
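
/* Added example: assuming BIGGEST_ALIGNMENT < 128, an offset tree of the
   form ((- (ADDR_EXPR exp)) & 127) passes every test above; the mask plus
   one is a power of 2, so adding this offset to &EXP rounds the address
   up to a 128-byte boundary.  */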
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
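
/* Added usage note: for an argument tree like "abc" + i, this returns
   the STRING_CST "abc" and sets *ptr_offset to i converted to sizetype;
   builtin expanders (e.g. for strlen) can then inspect the constant
   bytes at that offset.  */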
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (tree exp, int post, int ignore)
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9713 /* Decide whether incrementing or decrementing. */
9714 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9715 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9716 this_optab
= sub_optab
;
9718 /* Convert decrement by a constant into a negative increment. */
9719 if (this_optab
== sub_optab
9720 && GET_CODE (op1
) == CONST_INT
)
9722 op1
= GEN_INT (-INTVAL (op1
));
9723 this_optab
= add_optab
;
9726 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9727 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9729 /* For a preincrement, see if we can do this with a single instruction. */
9732 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9733 if (icode
!= (int) CODE_FOR_nothing
9734 /* Make sure that OP0 is valid for operands 0 and 1
9735 of the insn we want to queue. */
9736 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9737 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9738 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
	{
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
	{
	  rtx addr = (general_operand (XEXP (op0, 0), mode)
		      ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;

	  op0 = replace_equiv_address (op0, addr);
	  temp = force_reg (GET_MODE (op0), op0);
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
	  enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }
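
  /* Illustration (editorial, not in the original sources): because the
     increment queue is LIFO, playing the queue back runs the two insns
     queued above in the opposite order, roughly

	 temp = temp + op1;	(queued last, emitted first)
	 op0  = temp;		(queued first, emitted last)

     so the store back into OP0 happens after the add, while the
     enclosing postincrement expression still sees the old value
     through the QUEUED rtx returned by enqueue_insn.  */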
  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
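
  /* Illustration (editorial, not in the original sources): the switch
     above turns signed `x < 1' into `x <= 0' (LE) and `x >= 1' into
     `x > 0' (GT), so the zero-comparison special cases below can
     recognize them.  */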
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
					      arg0, arg1, type),
			target, VOIDmode, EXPAND_NORMAL);
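
  /* Illustration (editorial, not in the original sources): for
     `(x & 8) != 0' the folder yields the equivalent of `(x >> 3) & 1';
     for the EQ form the result is additionally XORed with 1.  Neither
     form needs a store-flag (scc) instruction.  */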
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
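
/* Illustration (editorial, not in the original sources): for the
   non-inverted case the set/jump/set fallback at the end of
   do_store_flag emits the equivalent of

       target = 1;
       if (op0 <code> op1) goto label;
       target = 0;
     label:

   with the two constants swapped when INVERT is set.  */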

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
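
/* Editorial note (not in the original sources): with the default
   above, a switch needs at least 4 case values to be dispatched
   through a table when a bounds-checking casesi pattern exists, and at
   least 5 when the bounds check must be emitted separately.  */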

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
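
/* Editorial note (not in the original sources): in contrast with the
   tablejump path below, the casesi pattern is handed MINVAL and RANGE
   as separate operands, so the target instruction performs the lower
   bound subtraction and the range check itself before dispatching.  */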

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
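
/* Illustration (editorial, not in the original sources): the single
   unsigned comparison in do_tablejump is the classic two-sided range
   check: once the lower bound has been subtracted,

       (unsigned) (orig_index - minval) > range

   holds exactly when orig_index lies outside [minval, minval + range].
   The dispatch address computed above is
   table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE).  */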
)
10267 try_tablejump (tree index_type
, tree index_expr
, tree minval
, tree range
,
10268 rtx table_label
, rtx default_label
)
10272 if (! HAVE_tablejump
)
10275 index_expr
= fold (build (MINUS_EXPR
, index_type
,
10276 convert (index_type
, index_expr
),
10277 convert (index_type
, minval
)));
10278 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
10280 index
= protect_from_queue (index
, 0);
10281 do_pending_stack_adjust ();
10283 do_tablejump (index
, TYPE_MODE (index_type
),
10284 convert_modes (TYPE_MODE (index_type
),
10285 TYPE_MODE (TREE_TYPE (range
)),
10286 expand_expr (range
, NULL_RTX
,
10288 TREE_UNSIGNED (TREE_TYPE (range
))),
10289 table_label
, default_label
);

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
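
/* Illustration (editorial, not in the original sources): a target with
   DImode moves but no V2DI support still gets a nonzero result for
   V2DImode here, since a V2DI object can be moved as two DImode
   pieces.  */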

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"