/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
								    tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
static rtx const_vector_from_tree PARAMS ((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
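/* Example (illustrative, not part of the original source): when
   optimizing for size, MOVE_RATIO above is 3.  A 16-byte copy between
   word-aligned operands on a 4-byte-word target needs 4 single-word
   moves, so move_by_pieces_ninsns returns 4, MOVE_BY_PIECES_P is
   false, and emit_block_move falls back to a movstr pattern or a
   library call instead of inline moves.  */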
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  /* QUEUED_BODY should never be a SEQUENCE.  */
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
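/* Usage sketch (illustrative only, not part of the original file):
   expanding V++ might queue the increment and protect later uses,

     rtx q = enqueue_insn (v, gen_add2_insn (v, const1_rtx));
     ...
     rtx safe = protect_from_queue (q, 0);  (pre-increment value of V)
     ...
     emit_queue ();  (the queued add is emitted here)

   where V is a hypothetical pseudo.  Any rtx that might contain a
   QUEUED must go through protect_from_queue immediately before being
   put into an insn.  */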
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode,
					     copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
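/* Usage sketch (illustrative only): widening a SImode pseudo into a
   DImode pseudo with zero-extension,

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   emits a single zero_extendsidi2 insn when the target provides one;
   otherwise the multiword code above moves the low word and fills the
   remaining words with const0_rtx because UNSIGNEDP is nonzero.  */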
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
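/* Example (illustrative only): narrowing a register value,

     rtx narrow = convert_modes (QImode, SImode, wide_reg, 1);

   returns gen_lowpart (QImode, wide_reg) without emitting any insns,
   since QImode is narrower than SImode; widening instead falls
   through to the gen_reg_rtx/convert_move tail above and emits the
   conversion.  WIDE_REG here is a hypothetical SImode pseudo.  */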
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (len, align)
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  return MOVE_BY_PIECES_P (len, align);
}
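/* Example (illustrative only): a caller asking whether a 12-byte,
   32-bit-aligned copy should be done inline,

     if (can_move_by_pieces (12, 32))
       ...

   On a 4-byte-word target this needs 3 single-word moves; with the
   default MOVE_RATIO of 15 the predicate is true and move_by_pieces
   will be used.  */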
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (to, from, len, align, endp)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
     int endp;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
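/* Worked example (illustrative only): on a target with MOVE_MAX 4,
   4-byte words and aligned operands, move_by_pieces_ninsns (11, 32)
   counts 11 / 4 = 2 SImode moves (3 bytes left), 1 HImode move
   (1 byte left) and 1 QImode move, returning 4.  */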
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
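/* Illustrative RTL (a sketch, not from the original source): with
   SImode pieces and explicit_inc_to == explicit_inc_from == 1, each
   iteration emits

     (set (mem:SI (reg to_addr)) (mem:SI (reg from_addr)))
     (set (reg to_addr) (plus (reg to_addr) (const_int 4)))
     (set (reg from_addr) (plus (reg from_addr) (const_int 4)))

   i.e. the post-increments are materialized as explicit adds after
   the move.  */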
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
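/* Usage sketch (illustrative only): copying 16 bytes between two
   BLKmode MEMs during ordinary expansion,

     rtx ret = emit_block_move (x, y, GEN_INT (16), BLOCK_OP_NORMAL);

   A constant size that passes MOVE_BY_PIECES_P is expanded inline;
   otherwise a movstr pattern is tried, then the memcpy/bcopy libcall,
   whose return value (for memcpy) comes back in RET.  */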
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  if (PUSH_ARGS)
    return true;
  else
    {
      /* Check to see whether memcpy takes all register arguments.  */
      static enum {
	takes_regs_uninit, takes_regs_no, takes_regs_yes
      } takes_regs = takes_regs_uninit;

      switch (takes_regs)
	{
	case takes_regs_uninit:
	  {
	    CUMULATIVE_ARGS args_so_far;
	    tree fn, arg;

	    fn = emit_block_move_libcall_fn (false);
	    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	      {
		enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
		rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
		if (!tmp || !REG_P (tmp))
		  goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
		if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
						NULL_TREE, 1))
		  goto fail_takes_regs;
#endif
		FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	      }
	  }
	  takes_regs = takes_regs_yes;
	  /* FALLTHRU */

	case takes_regs_yes:
	  return true;

	fail_takes_regs:
	  takes_regs = takes_regs_no;
	  /* FALLTHRU */
	case takes_regs_no:
	  return false;

	default:
	  abort ();
	}
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (dst, src, size)
     rtx dst, src, size;
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);
#endif

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (asmspec)
     const char *asmspec;
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (x, y, size, align)
     rtx x, y, size;
     unsigned int align ATTRIBUTE_UNUSED;
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NULL, NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NULL, NOTE_INSN_LOOP_END);
}
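/* The generated loop (illustrative sketch) has the shape

       iter = 0;  goto cmp;
     top:  x[iter] = y[iter];  iter += 1;
     cmp:  if (iter < size) goto top;

   copying one QImode unit per iteration, as noted above.  */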
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (orig)
     rtx orig;
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
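/* Example (illustrative only): for a value passed in two registers,
   ORIG might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   and gen_group_rtx returns the same shape with fresh pseudos in
   place of regs 3 and 4, keeping the modes and byte offsets.  */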
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, ssize);
            }
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
          else
            abort ();
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && GET_CODE (src) == REG)
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (dst, src)
     rtx dst, src;
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else if (bytepos == 0 && XVECLEN (src, 0))
            {
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
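
/* An illustrative sketch, not part of the original file: how a caller
   typically builds up CALL_FUSAGE with the routines above and attaches it
   to a call insn.  The register count and starting number are hypothetical;
   the real users live in calls.c.  Fenced out with #if 0.  */
#if 0
static void
example_record_call_usage (call_insn)
     rtx call_insn;
{
  rtx call_fusage = 0;

  /* Say the call reads two consecutive argument registers starting at
     hard register 0 (a made-up number).  */
  use_regs (&call_fusage, 0, 2);

  /* The resulting USE chain hangs off the CALL_INSN.  */
  CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
#endif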
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (to, len, constfun, constfundata, align, endp)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
     int endp;
{
  struct store_by_pieces data;

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
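
/* An illustrative sketch, not part of the original file: a CONSTFUN
   callback in the shape store_by_pieces and can_store_by_pieces expect.
   It assumes CONSTFUNDATA points at a character buffer and that c_readstr
   (GCC's helper for packaging string bytes as an rtx constant) is visible
   here; the helper name example_constfun is hypothetical.  Fenced out
   with #if 0.  */
#if 0
static rtx
example_constfun (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  /* Read GET_MODE_SIZE (MODE) bytes starting at OFFSET and return them
     as a constant of mode MODE.  */
  return c_readstr ((const char *) data + offset, mode);
}
#endif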
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
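
/* An illustrative sketch, not part of the original file: zeroing a
   64-byte BLKmode stack temporary with clear_storage.  The size is an
   arbitrary choice for the example.  Fenced out with #if 0.  */
#if 0
static void
example_clear_temp ()
{
  rtx mem = assign_stack_temp (BLKmode, 64, 0);

  /* clear_storage picks clear_by_pieces, a clrstr pattern, or a
     memset/bzero libcall, whichever applies.  */
  clear_storage (mem, GEN_INT (64));
}
#endif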
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (object, size, align)
     rtx object, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (object, size)
     rtx object, size;
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (asmspec)
     const char *asmspec;
{
  if (!block_clear_fn)
    {
      tree fn, args;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memset");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           integer_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bzero");
          args = build_function_type_list (void_type_node, ptr_type_node,
                                           unsigned_type_node, NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_clear_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
    }
}
static tree
clear_storage_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn, NULL);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
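
/* An illustrative sketch, not part of the original file: loading a
   constant into a fresh pseudo.  If 42 were not a legitimate constant
   on the target, emit_move_insn would spill it to the constant pool via
   force_const_mem, as the code above shows.  Fenced out with #if 0.  */
#if 0
static rtx
example_load_constant ()
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif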
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = GET_MODE_INNER (mode))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
      if (stack
          && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
              != GET_MODE_SIZE (submode)))
        {
          rtx temp;
          HOST_WIDE_INT offset1, offset2;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
          offset1 = 0;
          offset2 = GET_MODE_SIZE (submode);
#else
          offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
          offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
                     + GET_MODE_SIZE (submode));
#endif

          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset1))),
                          gen_realpart (submode, y));
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset2))),
                          gen_imagpart (submode, y));
        }
      else
#endif
      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_imagpart (submode, y));
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_realpart (submode, y));
#else
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_realpart (submode, y));
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_imagpart (submode, y));
#endif
        }
      else
        {
          rtx realpart_x, realpart_y;
          rtx imagpart_x, imagpart_y;

          /* If this is a complex value with each part being smaller than a
             word, the usual calling sequence will likely pack the pieces into
             a single register.  Unfortunately, SUBREG of hard registers only
             deals in terms of words, so we have a problem converting input
             arguments to the CONCAT of two registers that is used elsewhere
             for complex values.  If this is before reload, we can copy it into
             memory and reload.  FIXME, we should see about using extract and
             insert on integer registers, but complex short and complex char
             variables should be rarely used.  */
          if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
              && (reload_in_progress | reload_completed) == 0)
            {
              int packed_dest_p
                = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
              int packed_src_p
                = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

              if (packed_dest_p || packed_src_p)
                {
                  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                               ? MODE_FLOAT : MODE_INT);

                  enum machine_mode reg_mode
                    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

                  if (reg_mode != BLKmode)
                    {
                      rtx mem = assign_stack_temp (reg_mode,
                                                   GET_MODE_SIZE (mode), 0);
                      rtx cmem = adjust_address (mem, mode, 0);

                      cfun->cannot_inline
                        = N_("function using short complex types cannot be inline");

                      if (packed_dest_p)
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

                          emit_move_insn_1 (cmem, y);
                          return emit_move_insn_1 (sreg, mem);
                        }
                      else
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

                          emit_move_insn_1 (mem, sreg);
                          return emit_move_insn_1 (x, cmem);
                        }
                    }
                }
            }

          realpart_x = gen_realpart (submode, x);
          realpart_y = gen_realpart (submode, y);
          imagpart_x = gen_imagpart (submode, x);
          imagpart_y = gen_imagpart (submode, y);

          /* Show the output dies here.  This is necessary for SUBREGs
             of pseudos since we cannot track their lifetimes correctly;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed)
              && (GET_CODE (realpart_x) == SUBREG
                  || GET_CODE (imagpart_x) == SUBREG))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

          emit_move_insn (realpart_x, realpart_y);
          emit_move_insn (imagpart_x, imagpart_y);
        }

      return get_last_insn ();
    }

  /* Handle MODE_CC modes:  If we don't have a special move insn for this mode,
     find a mode to do it in.  If we have a movcc, use it.  Otherwise,
     find the MODE_INT mode of the same width.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
           && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    {
      enum insn_code insn_code;
      enum machine_mode tmode = VOIDmode;
      rtx x1 = x, y1 = y;

      if (mode != CCmode
          && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
        tmode = CCmode;
      else
        for (tmode = QImode; tmode != VOIDmode;
             tmode = GET_MODE_WIDER_MODE (tmode))
          if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
            break;

      if (tmode == VOIDmode)
        abort ();

      /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
         may call change_address which is not appropriate if we were
         called when a reload was in progress.  We don't have to worry
         about changing the address since the size in bytes is supposed to
         be the same.  Copy the MEM to change the mode and move any
         substitutions from the old MEM to the new one.  */

      if (reload_in_progress)
        {
          x = gen_lowpart_common (tmode, x1);
          if (x == 0 && GET_CODE (x1) == MEM)
            {
              x = adjust_address_nv (x1, tmode, 0);
              copy_replacements (x1, x);
            }

          y = gen_lowpart_common (tmode, y1);
          if (y == 0 && GET_CODE (y1) == MEM)
            {
              y = adjust_address_nv (y1, tmode, 0);
              copy_replacements (y1, y);
            }
        }
      else
        {
          x = gen_lowpart (tmode, x);
          y = gen_lowpart (tmode, y);
        }

      insn_code = mov_optab->handlers[(int) tmode].insn_code;
      return emit_insn (GEN_FCN (insn_code) (x, y));
    }

  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          rtx temp;
          enum rtx_code code;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

          code = GET_CODE (XEXP (x, 0));

          /* Just hope that small offsets off SP are OK.  */
          if (code == POST_INC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (-((HOST_WIDE_INT)
                                            GET_MODE_SIZE (GET_MODE (x)))));
          else if (code == POST_DEC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          else
            temp = stack_pointer_rtx;

          x = change_address (x, VOIDmode, temp);
        }
#endif

      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = get_insns ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (x, y)
     rtx x, y;
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
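
/* An illustrative sketch, not part of the original file: reserving a
   32-byte block of outgoing stack space (no extra padding) and wrapping
   the returned address in a BLKmode MEM.  The size is arbitrary.  Fenced
   out with #if 0.  */
#if 0
static rtx
example_push_block ()
{
  rtx addr = push_block (GEN_INT (32), 0, 0);

  return gen_rtx_MEM (BLKmode, addr);
}
#endif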
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     enum machine_mode mode;
     rtx x;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space,
                alignment_pad)
     rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument:  `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          if (type != 0)
            {
              set_mem_attributes (target, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (target, 0);
            }

          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);

          if (type != 0)
            {
              set_mem_attributes (dest, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (dest, 0);
            }

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, -1);  /* ??? size?  */
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

          if (GET_CODE (to_rtx) != MEM)
            abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (GET_CODE (to_rtx) == MEM
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_type (TREE_TYPE (to),
                                                                 offset));
        }

      if (GET_CODE (to_rtx) == MEM)
        {
          /* If the field is at offset zero, we could have been given the
             DECL_RTX of the parent struct.  Don't munge it.  */
          to_rtx = shallow_copy_rtx (to_rtx);

          set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
        }

      /* Deal with volatile and readonly fields.  The former is only done
         for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && GET_CODE (to_rtx) == MEM)
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_VOLATILE_P (to_rtx) = 1;
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1)))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast for HPUX compiler.  */
                             ? ((enum machine_mode)
                                TYPE_MODE (TREE_TYPE (to)))
                             : VOIDmode),
                            unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (POINTER_TYPE_P (TREE_TYPE (to))
              && GET_MODE (to_rtx) != GET_MODE (value))
            value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      if (TARGET_MEM_FUNCTIONS)
        emit_library_call (memmove_libfunc, LCT_NORMAL,
                           VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                           XEXP (from_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (sizetype),
                                            size, TREE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));
      else
        emit_library_call (bcopy_libfunc, LCT_NORMAL,
                           VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                           XEXP (to_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (integer_type_node),
                                            size,
                                            TREE_UNSIGNED (integer_type_node)),
                           TYPE_MODE (integer_type_node));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE & 1 is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE & 1 is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.

   If WANT_VALUE & 2 is set, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */
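
/* For example, when expanding the C chain "a = b = c", the inner
   assignment would be expanded with WANT_VALUE & 1 set so that the
   value of "b = c" can feed the outer store, whereas code pushing an
   argument onto the stack would set WANT_VALUE & 2 instead.  */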
rtx
store_expr (exp, target, want_value)
     tree exp;
     rtx target;
     int want_value;
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other.  Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
      if (want_value)
        abort ();
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value & 1 ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target),
                              (want_value & 2
                               ? EXPAND_STACK_PARM : EXPAND_NORMAL));
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
                            (want_value & 2
                             ? EXPAND_STACK_PARM : EXPAND_NORMAL));

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
        dont_return_target = 1;
    }
  else if ((want_value & 1) != 0
           && GET_CODE (target) == MEM
           && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target),
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        {
          /* If TEMP is already in the desired TARGET, only copy it from
             memory and don't store it there again.  */
          if (temp == target
              || (rtx_equal_p (temp, target)
                  && ! side_effects_p (temp) && ! side_effects_p (target)))
            dont_store_target = 1;
          temp = copy_to_reg (temp);
        }
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if ((want_value & 1) == 0
          && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0)
        {
          if (TREE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp = convert
              ((*lang_hooks.types.signed_or_unsigned_type)
               (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

          exp = convert ((*lang_hooks.types.type_for_mode)
                         (GET_MODE (SUBREG_REG (target)),
                          SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a MEM and we want a result value, make the access
         now so it gets done only once.  Strictly speaking, this is
         only necessary if the MEM is volatile, or if the address
         overlaps TARGET.  But not performing the load twice also
         reduces the amount of rtl we generate and then have to CSE.  */
      if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
         target.  Otherwise, the caller might get confused by a result whose
         mode is larger than expected.  */

      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
        {
          if (GET_MODE (temp) != VOIDmode)
            {
              temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (temp,
                SUBREG_PROMOTED_UNSIGNED_P (target));
            }
          else
            temp = convert_modes (GET_MODE (target),
                                  GET_MODE (SUBREG_REG (target)),
                                  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      return want_value & 1 ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target),
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || (want_value & 1) != 0))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
         /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
            but TARGET is not valid memory reference, TEMP will differ
            from TARGET although it is really the same location.  */
      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
          || target != DECL_RTL_IF_SET (exp))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
         unless necessary, because some front-ends (C++) expr_size-hook
         aborts on objects that are not supposed to be bit-copied or
         bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (want_value & 2
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (want_value & 2
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
              rtx label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
                                               TREE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (want_value & 2
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */
              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
                  if (GET_MODE (copy_size_rtx) != Pmode)
                    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
                                                     TREE_UNSIGNED (sizetype));
#endif

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size);

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (want_value & 2
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
        emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if ((want_value & 1) == 0)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if ((want_value & 1) != 0
           && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
        is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
           elt = TREE_CHAIN (elt))
        if (!is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        if (! is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        {
          /* If there are no ranges of true bits, it is all zero.  */
          return elt == NULL_TREE;
        }
      for (; elt; elt = TREE_CHAIN (elt))
        {
          /* We do not handle the case where the index is a RANGE_EXPR,
             so the statistic will be somewhat inaccurate.
             We do make a more accurate count in store_constructor itself,
             so since this function is only used for nested array elements,
             this should be close enough.  */
          if (mostly_zeros_p (TREE_VALUE (elt)))
            zeros++;
          elts++;
        }

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
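
/* For example, when expanding "struct s x = { { 0, 1 } }", the inner
   constructor can be handed straight back to store_constructor, and a
   nonzero CLEARED lets it skip re-clearing storage that the outer
   constructor already zeroed.  */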
static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
                         alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a nonzero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (GET_CODE (target) == MEM)
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
                 alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
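
/* For example, when a constructor is stored into a field packed to
   exclude padding, SIZE can be smaller than int_size_in_bytes of EXP's
   type, and the bytes past SIZE must not be written.  */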
static void
store_constructor (exp, target, cleared, size)
     tree exp;
     rtx target;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* We either clear the aggregate or indicate the value is dead.  */
      if ((TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && ! cleared
          && ! CONSTRUCTOR_ELTS (exp))
        /* If the constructor is empty, clear the union.  */
        {
          clear_storage (target, expr_size (exp));
          cleared = 1;
        }

      /* If we are building a static constructor into a register,
         set the initial value as zero so we can fold the value into
         a constant.  But if more than one register is involved,
         this probably loses.  */
      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
               && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
        {
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
          cleared = 1;
        }

      /* If the constructor has fewer fields than the structure
         or if we are initializing the structure to mostly zeros,
         clear the whole structure first.  Don't do this if TARGET is a
         register whose mode size isn't equal to SIZE since clear_storage
         can't handle this case.  */
      else if (! cleared && size > 0
               && ((list_length (CONSTRUCTOR_ELTS (exp))
                    != fields_length (type))
                   || mostly_zeros_p (exp))
               && (GET_CODE (target) != REG
                   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
                       == size)))
        {
          rtx xtarget = target;

          if (readonly_fields_p (type))
            {
              xtarget = copy_rtx (xtarget);
              RTX_UNCHANGING_P (xtarget) = 1;
            }

          clear_storage (xtarget, GEN_INT (size));
          cleared = 1;
        }

      if (! cleared)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        {
          tree field = TREE_PURPOSE (elt);
          tree value = TREE_VALUE (elt);
          enum machine_mode mode;
          HOST_WIDE_INT bitsize;
          HOST_WIDE_INT bitpos = 0;
          tree offset;
          rtx to_rtx = target;

          /* Just ignore missing fields.
             We cleared the whole structure, above,
             if any fields are missing.  */
          if (field == 0)
            continue;

          if (cleared && is_zeros_p (value))
            continue;

          if (host_integerp (DECL_SIZE (field), 1))
            bitsize = tree_low_cst (DECL_SIZE (field), 1);
          else
            bitsize = -1;

          mode = DECL_MODE (field);
          if (DECL_BIT_FIELD (field))
            mode = VOIDmode;

          offset = DECL_FIELD_OFFSET (field);
          if (host_integerp (offset, 0)
              && host_integerp (bit_position (field), 0))
            {
              bitpos = int_bit_position (field);
              offset = 0;
            }
          else
            bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

          if (offset)
            {
              rtx offset_rtx;

              if (CONTAINS_PLACEHOLDER_P (offset))
                offset = build (WITH_RECORD_EXPR, sizetype,
                                offset, make_tree (TREE_TYPE (exp), target));

              offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
              if (GET_CODE (to_rtx) != MEM)
                abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (offset_rtx) != Pmode)
                offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
              if (GET_MODE (offset_rtx) != ptr_mode)
                offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

              to_rtx = offset_address (to_rtx, offset_rtx,
                                       highest_pow2_factor (offset));
            }

          if (TREE_READONLY (field))
            {
              if (GET_CODE (to_rtx) == MEM)
                to_rtx = copy_rtx (to_rtx);

              RTX_UNCHANGING_P (to_rtx) = 1;
            }

#ifdef WORD_REGISTER_OPERATIONS
          /* If this initializes a field that is smaller than a word, at the
             start of a word, try to widen it to a full word.
             This special case allows us to output C++ member function
             initializations in a form that the optimizers can understand.  */
          if (GET_CODE (target) == REG
              && bitsize < BITS_PER_WORD
              && bitpos % BITS_PER_WORD == 0
              && GET_MODE_CLASS (mode) == MODE_INT
              && TREE_CODE (value) == INTEGER_CST
              && exp_size >= 0
              && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
            {
              tree type = TREE_TYPE (value);

              if (TYPE_PRECISION (type) < BITS_PER_WORD)
                {
                  type = (*lang_hooks.types.type_for_size)
                    (BITS_PER_WORD, TREE_UNSIGNED (type));
                  value = convert (type, value);
                }

              if (BYTES_BIG_ENDIAN)
                value
                  = fold (build (LSHIFT_EXPR, type, value,
                                 build_int_2 (BITS_PER_WORD - bitsize, 0)));
              bitsize = BITS_PER_WORD;
              mode = word_mode;
            }
#endif

          if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
              && DECL_NONADDRESSABLE_P (field))
            {
              to_rtx = copy_rtx (to_rtx);
              MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
            }

          store_constructor_field (to_rtx, bitsize, bitpos, mode,
                                   value, type, cleared,
                                   get_alias_set (TREE_TYPE (field)));
        }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE
           || TREE_CODE (type) == VECTOR_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      tree elttype = TREE_TYPE (type);
      int const_bounds_p;
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;

      /* Vectors are like arrays, but the domain is stored via an array
         type indirectly.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
        {
          /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
             the same field as TYPE_DOMAIN, we are not guaranteed that
             it always will.  */
          domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
          domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
        }

      const_bounds_p = (TYPE_MIN_VALUE (domain)
                        && TYPE_MAX_VALUE (domain)
                        && host_integerp (TYPE_MIN_VALUE (domain), 0)
                        && host_integerp (TYPE_MAX_VALUE (domain), 0));

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
        {
          minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
          maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
        }

      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is
         static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
        need_to_clear = 1;
      else
        {
          HOST_WIDE_INT count = 0, zero_count = 0;
          need_to_clear = ! const_bounds_p;

          /* This loop is a more accurate version of the loop in
             mostly_zeros_p (it handles RANGE_EXPR in an index).
             It is also needed to check for missing elements.  */
          for (elt = CONSTRUCTOR_ELTS (exp);
               elt != NULL_TREE && ! need_to_clear;
               elt = TREE_CHAIN (elt))
            {
              tree index = TREE_PURPOSE (elt);
              HOST_WIDE_INT this_node_count;

              if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                {
                  tree lo_index = TREE_OPERAND (index, 0);
                  tree hi_index = TREE_OPERAND (index, 1);

                  if (! host_integerp (lo_index, 1)
                      || ! host_integerp (hi_index, 1))
                    {
                      need_to_clear = 1;
                      break;
                    }

                  this_node_count = (tree_low_cst (hi_index, 1)
                                     - tree_low_cst (lo_index, 1) + 1);
                }
              else
                this_node_count = 1;

              count += this_node_count;
              if (mostly_zeros_p (TREE_VALUE (elt)))
                zero_count += this_node_count;
            }

          /* Clear the entire array first if there are any missing elements,
             or if the incidence of zero elements is >= 75%.  */
          if (! need_to_clear
              && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
            need_to_clear = 1;
        }

      if (need_to_clear && size > 0)
        {
          if (! cleared)
            {
              if (REG_P (target))
                emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
              else
                clear_storage (target, GEN_INT (size));
            }
          cleared = 1;
        }
      else if (REG_P (target))
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
           elt;
           elt = TREE_CHAIN (elt), i++)
        {
          enum machine_mode mode;
          HOST_WIDE_INT bitsize;
          HOST_WIDE_INT bitpos;
          int unsignedp;
          tree value = TREE_VALUE (elt);
          tree index = TREE_PURPOSE (elt);
          rtx xtarget = target;

          if (cleared && is_zeros_p (value))
            continue;

          unsignedp = TREE_UNSIGNED (elttype);
          mode = TYPE_MODE (elttype);
          if (mode == BLKmode)
            bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
                       ? tree_low_cst (TYPE_SIZE (elttype), 1)
                       : -1);
          else
            bitsize = GET_MODE_BITSIZE (mode);

          if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
            {
              tree lo_index = TREE_OPERAND (index, 0);
              tree hi_index = TREE_OPERAND (index, 1);
              rtx index_r, pos_rtx, loop_end;
              struct nesting *loop;
              HOST_WIDE_INT lo, hi, count;
              tree position;

              /* If the range is constant and "small", unroll the loop.  */
              if (const_bounds_p
                  && host_integerp (lo_index, 0)
                  && host_integerp (hi_index, 0)
                  && (lo = tree_low_cst (lo_index, 0),
                      hi = tree_low_cst (hi_index, 0),
                      count = hi - lo + 1,
                      (GET_CODE (target) != MEM
                       || count <= 2
                       || (host_integerp (TYPE_SIZE (elttype), 1)
                           && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
                               <= 40 * 8)))))
                {
                  lo -= minelt;  hi -= minelt;
                  for (; lo <= hi; lo++)
                    {
                      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

                      if (GET_CODE (target) == MEM
                          && !MEM_KEEP_ALIAS_SET_P (target)
                          && TREE_CODE (type) == ARRAY_TYPE
                          && TYPE_NONALIASED_COMPONENT (type))
                        {
                          target = copy_rtx (target);
                          MEM_KEEP_ALIAS_SET_P (target) = 1;
                        }

                      store_constructor_field
                        (target, bitsize, bitpos, mode, value, type, cleared,
                         get_alias_set (elttype));
                    }
                }
              else
                {
                  expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
                  loop_end = gen_label_rtx ();

                  unsignedp = TREE_UNSIGNED (domain);

                  index = build_decl (VAR_DECL, NULL_TREE, domain);

                  index_r
                    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                 &unsignedp, 0));
                  SET_DECL_RTL (index, index_r);
                  if (TREE_CODE (value) == SAVE_EXPR
                      && SAVE_EXPR_RTL (value) == 0)
                    {
                      /* Make sure value gets expanded once before the
                         loop.  */
                      expand_expr (value, const0_rtx, VOIDmode, 0);
                      emit_queue ();
                    }
                  store_expr (lo_index, index_r, 0);
                  loop = expand_start_loop (0);

                  /* Assign value to element index.  */
                  position
                    = convert (ssizetype,
                               fold (build (MINUS_EXPR, TREE_TYPE (index),
                                            index, TYPE_MIN_VALUE (domain))));
                  position = size_binop (MULT_EXPR, position,
                                         convert (ssizetype,
                                                  TYPE_SIZE_UNIT (elttype)));

                  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
                  xtarget = offset_address (target, pos_rtx,
                                            highest_pow2_factor (position));
                  xtarget = adjust_address (xtarget, mode, 0);
                  if (TREE_CODE (value) == CONSTRUCTOR)
                    store_constructor (value, xtarget, cleared,
                                       bitsize / BITS_PER_UNIT);
                  else
                    store_expr (value, xtarget, 0);

                  expand_exit_loop_if_false (loop,
                                             build (LT_EXPR, integer_type_node,
                                                    index, hi_index));

                  expand_increment (build (PREINCREMENT_EXPR,
                                           TREE_TYPE (index),
                                           index, integer_one_node), 0, 0);
                  expand_end_loop ();
                  emit_label (loop_end);
                }
            }
          else if ((index != 0 && ! host_integerp (index, 0))
                   || ! host_integerp (TYPE_SIZE (elttype), 1))
            {
              tree position;

              if (index == 0)
                index = ssize_int (1);

              if (minelt)
                index = convert (ssizetype,
                                 fold (build (MINUS_EXPR, index,
                                              TYPE_MIN_VALUE (domain))));

              position = size_binop (MULT_EXPR, index,
                                     convert (ssizetype,
                                              TYPE_SIZE_UNIT (elttype)));
              xtarget = offset_address (target,
                                        expand_expr (position, 0, VOIDmode, 0),
                                        highest_pow2_factor (position));
              xtarget = adjust_address (xtarget, mode, 0);
              store_expr (value, xtarget, 0);
            }
          else
            {
              if (index != 0)
                bitpos = ((tree_low_cst (index, 0) - minelt)
                          * tree_low_cst (TYPE_SIZE (elttype), 1));
              else
                bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

              if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
                  && TREE_CODE (type) == ARRAY_TYPE
                  && TYPE_NONALIASED_COMPONENT (type))
                {
                  target = copy_rtx (target);
                  MEM_KEEP_ALIAS_SET_P (target) = 1;
                }

              store_constructor_field (target, bitsize, bitpos, mode, value,
                                       type, cleared, get_alias_set (elttype));
            }
        }
    }
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the whole set (using bzero/memset), and
         then set the bits we want.  */
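
      /* For example, a Pascal-style set [3..5, lo..hi] with variable LO
         and HI would be expanded by first storing the constant words
         covering bits 3..5 and then calling setbits_libfunc (or memset,
         when a range's bounds are constants divisible by BITS_PER_UNIT)
         for the remaining range.  */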
      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
        {
          if (!cleared)
            clear_storage (target, GEN_INT (size));
          return;
        }

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
                              size_diffop (domain_max, domain_min),
                              ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
         are "complicated" (more than one range), initialize (the
         constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
          || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
        {
          unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
          enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
          char *bit_buffer = (char *) alloca (nbits);
          HOST_WIDE_INT word = 0;
          unsigned int bit_pos = 0;
          unsigned int ibit = 0;
          unsigned int offset = 0;  /* In bytes from beginning of set.  */

          elt = get_set_constructor_bits (exp, bit_buffer, nbits);
          for (;;)
            {
              if (bit_buffer[ibit])
                {
                  if (BYTES_BIG_ENDIAN)
                    word |= (1 << (set_word_size - 1 - bit_pos));
                  else
                    word |= 1 << bit_pos;
                }

              bit_pos++;  ibit++;
              if (bit_pos >= set_word_size || ibit == nbits)
                {
                  if (word != 0 || ! cleared)
                    {
                      rtx datum = GEN_INT (word);
                      rtx to_rtx;

                      /* The assumption here is that it is safe to use
                         XEXP if the set is multi-word, but not if
                         it's single-word.  */
                      if (GET_CODE (target) == MEM)
                        to_rtx = adjust_address (target, mode, offset);
                      else if (offset == 0)
                        to_rtx = target;
                      else
                        abort ();
                      emit_move_insn (to_rtx, datum);
                    }

                  if (ibit == nbits)
                    break;
                  word = 0;
                  bit_pos = 0;
                  offset += set_word_size / BITS_PER_UNIT;
                }
            }
        }
      else if (!cleared)
        /* Don't bother clearing storage if the set is all ones.  */
        if (TREE_CHAIN (elt) != NULL_TREE
            || (TREE_PURPOSE (elt) == NULL_TREE
                ? nbits != 1
                : ( ! host_integerp (TREE_VALUE (elt), 0)
                   || ! host_integerp (TREE_PURPOSE (elt), 0)
                   || (tree_low_cst (TREE_VALUE (elt), 0)
                       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
                       != (HOST_WIDE_INT) nbits))))
          clear_storage (target, expr_size (exp));

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
        {
          /* Start of range of element or NULL.  */
          tree startbit = TREE_PURPOSE (elt);
          /* End of range of element, or element value.  */
          tree endbit = TREE_VALUE (elt);
          HOST_WIDE_INT startb, endb;
          rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

          bitlength_rtx = expand_expr (bitlength,
                                       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

          /* Handle non-range tuple element like [ expr ].  */
          if (startbit == NULL_TREE)
            {
              startbit = save_expr (endbit);
              endbit = startbit;
            }

          startbit = convert (sizetype, startbit);
          endbit = convert (sizetype, endbit);
          if (! integer_zerop (domain_min))
            {
              startbit = size_binop (MINUS_EXPR, startbit, domain_min);
              endbit = size_binop (MINUS_EXPR, endbit, domain_min);
            }
          startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
                                      EXPAND_CONST_ADDRESS);
          endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
                                    EXPAND_CONST_ADDRESS);

          if (REG_P (target))
            {
              targetx
                = assign_temp
                  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
                                          (GET_MODE (target), 0),
                                          TYPE_QUAL_CONST)),
                   0, 1, 1);
              emit_move_insn (targetx, target);
            }

          else if (GET_CODE (target) == MEM)
            targetx = target;
          else
            abort ();

          /* Optimization:  If startbit and endbit are constants divisible
             by BITS_PER_UNIT, call memset instead.  */
          if (TARGET_MEM_FUNCTIONS
              && TREE_CODE (startbit) == INTEGER_CST
              && TREE_CODE (endbit) == INTEGER_CST
              && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
              && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
            {
              emit_library_call (memset_libfunc, LCT_NORMAL,
                                 VOIDmode, 3,
                                 plus_constant (XEXP (targetx, 0),
                                                startb / BITS_PER_UNIT),
                                 Pmode,
                                 constm1_rtx, TYPE_MODE (integer_type_node),
                                 GEN_INT ((endb - startb) / BITS_PER_UNIT),
                                 TYPE_MODE (sizetype));
            }
          else
            emit_library_call (setbits_libfunc, LCT_NORMAL,
                               VOIDmode, 4, XEXP (targetx, 0),
                               Pmode, bitlength_rtx, TYPE_MODE (sizetype),
                               startbit_rtx, TYPE_MODE (sizetype),
                               endbit_rtx, TYPE_MODE (sizetype));

          if (REG_P (target))
            emit_move_insn (target, targetx);
        }
    }

  else
    abort ();
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
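
/* For example, storing into a C bitfield "s.f" where F occupies bits
   3..9 of S would use BITSIZE 7, BITPOS 3 and MODE VOIDmode, which
   forces the store_bit_field path below.  */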
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
             alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     tree type;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
                   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
        abort ();
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (mode != BLKmode
          && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
                || bitpos % GET_MODE_ALIGNMENT (mode))
               && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
              || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
         RHS isn't the same size as the bitfield, we must use bitfield
         operations.  */
      || (bitsize >= 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
          && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
                                       - bitsize),
                             temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
         must be in memory and BITPOS must be aligned on a byte
         boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
        {
          if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
              || bitpos % BITS_PER_UNIT != 0)
            abort ();

          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           BLOCK_OP_NORMAL);

          return value_mode == VOIDmode ? const0_rtx : target;
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp,
                       int_size_in_bytes (type));

      if (value_mode != VOIDmode)
        {
          /* The caller wants an rtx for the value.
             If possible, avoid refetching from the bitfield itself.  */
          if (width_mask != 0
              && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
            {
              tree count;
              enum machine_mode tmode;

              tmode = GET_MODE (temp);
              if (tmode == VOIDmode)
                tmode = value_mode;

              if (unsignedp)
                return expand_and (tmode, temp,
                                   gen_int_mode (width_mask, tmode),
                                   NULL_RTX);

              count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
              temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
              return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
            }

          return extract_bit_field (target, bitsize, bitpos, unsignedp,
                                    NULL_RTX, value_mode, VOIDmode,
                                    int_size_in_bytes (type));
        }
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;

      /* If a value is wanted, it must be the lhs;
         so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
          && ! CONSTANT_ADDRESS_P (addr)
          /* A frame-pointer reference is already stable.  */
          && ! (GET_CODE (addr) == PLUS
                && GET_CODE (XEXP (addr, 1)) == CONST_INT
                && (XEXP (addr, 0) == virtual_incoming_args_rtx
                    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
        to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));

      /* Now build a reference to just the desired component.  */

      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
        to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
        set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
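
/* For example, for the C lvalue "s.a[i].b", this would return the
   innermost object "s", set *POFFSET to a tree for the scaled, variable
   offset of element I, and set *PBITPOS to the constant bit offset of
   field B within that element.  */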
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
                     punsignedp, pvolatilep)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree placeholder_ptr = 0;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
        mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
        *pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
        bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (exp, 1);
          tree this_offset = DECL_FIELD_OFFSET (field);

          /* If this field hasn't been filled in yet, don't go
             past it.  This should only happen when folding expressions
             made during type construction.  */
          if (this_offset == 0)
            break;
          else if (CONTAINS_PLACEHOLDER_P (this_offset))
            this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);

          offset = size_binop (PLUS_EXPR, offset, this_offset);
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                   DECL_FIELD_BIT_OFFSET (field));

          /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
        }

      else if (TREE_CODE (exp) == ARRAY_REF
               || TREE_CODE (exp) == ARRAY_RANGE_REF)
        {
          tree index = TREE_OPERAND (exp, 1);
          tree array = TREE_OPERAND (exp, 0);
          tree domain = TYPE_DOMAIN (TREE_TYPE (array));
          tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

          /* We assume all arrays have sizes that are a multiple of a byte.
             First subtract the lower bound, if any, in the type of the
             index, then convert to sizetype and multiply by the size of the
             array element.  */
          if (low_bound != 0 && ! integer_zerop (low_bound))
            index = fold (build (MINUS_EXPR, TREE_TYPE (index),
                                 index, low_bound));

          /* If the index has a self-referential type, pass it to a
             WITH_RECORD_EXPR; if the component size is, pass our
             component to one.  */
          if (CONTAINS_PLACEHOLDER_P (index))
            index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
          if (CONTAINS_PLACEHOLDER_P (unit_size))
            unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);

          offset = size_binop (PLUS_EXPR, offset,
                               size_binop (MULT_EXPR,
                                           convert (sizetype, index),
                                           unit_size));
        }

      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
        {
          tree new = find_placeholder (exp, &placeholder_ptr);

          /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
             We might have been called from tree optimization where we
             haven't set up an object yet.  */
          if (new == 0)
            break;
          else
            exp = new;

          continue;
        }

      /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
         conversions that don't change the mode, and all view conversions
         except those that need to "step up" the alignment.  */
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
               && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
                     && ! ((TYPE_ALIGN (TREE_TYPE (exp))
                            > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
                           && STRICT_ALIGNMENT
                           && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                               < BIGGEST_ALIGNMENT)
                           && (TYPE_ALIGN_OK (TREE_TYPE (exp))
                               || TYPE_ALIGN_OK (TREE_TYPE
                                                 (TREE_OPERAND (exp, 0))))))
               && ! ((TREE_CODE (exp) == NOP_EXPR
                      || TREE_CODE (exp) == CONVERT_EXPR)
                     && (TYPE_MODE (TREE_TYPE (exp))
                         == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
                                 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
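
/* For example, given (plus:SI (mult:SI (reg:SI 60) (const_int 4))
   (reg:SI 61)), force_operand would emit the multiply and the add and
   return a pseudo register holding the result.  */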
rtx
force_operand (value, target)
     rtx value, target;
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
        subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
        target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
                    code == ZERO_EXTEND);
      return target;
    }

  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
        {
          code = PLUS;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_simple_binop (GET_MODE (value), code,
                                          XEXP (XEXP (value, 0), 0), op2,
                                          subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_simple_binop (GET_MODE (value), code, temp,
                                      force_operand (XEXP (XEXP (value,
                                                                 0), 1), 0),
                                      target, 0, OPTAB_LIB_WIDEN);
        }

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
        {
        case MULT:
          return expand_mult (GET_MODE (value), op1, op2, target, 1);
        case DIV:
          if (!INTEGRAL_MODE_P (GET_MODE (value)))
            return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                        target, 1, OPTAB_LIB_WIDEN);
          else
            return expand_divmod (0,
                                  FLOAT_MODE_P (GET_MODE (value))
                                  ? RDIV_EXPR : TRUNC_DIV_EXPR,
                                  GET_MODE (value), op1, op2, target, 0);
        case MOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 0);
        case UDIV:
          return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case UMOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case ASHIFTRT:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 0, OPTAB_LIB_WIDEN);
        default:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 1, OPTAB_LIB_WIDEN);
        }
    }
  if (GET_RTX_CLASS (code) == '1')
    {
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
      && (GET_MODE_SIZE (GET_MODE (value))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
                             force_reg (GET_MODE (SUBREG_REG (value)),
                                        force_operand (SUBREG_REG (value),
                                                       NULL_RTX)),
                             GET_MODE (SUBREG_REG (value)),
                             SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
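
/* For example, before expanding the right-hand side of "x = f (x)", a
   caller may ask whether X's rtx is safe from the call tree; a zero
   answer merely makes expansion go through an intermediate temporary.  */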
6142 safe_from_p (x
, exp
, top_p
)
6149 static tree save_expr_list
;
6152 /* If EXP has varying size, we MUST use a target since we currently
6153 have no way of allocating temporaries of variable size
6154 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6155 So we assume here that something at a higher level has prevented a
6156 clash. This is somewhat bogus, but the best we can do. Only
6157 do this when X is BLKmode and when we are at the top level. */
6158 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
6159 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
6160 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
6161 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
6162 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
6164 && GET_MODE (x
) == BLKmode
)
6165 /* If X is in the outgoing argument area, it is always safe. */
6166 || (GET_CODE (x
) == MEM
6167 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
6168 || (GET_CODE (XEXP (x
, 0)) == PLUS
6169 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
6172 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6173 find the underlying pseudo. */
6174 if (GET_CODE (x
) == SUBREG
)
6177 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6181 /* A SAVE_EXPR might appear many times in the expression passed to the
6182 top-level safe_from_p call, and if it has a complex subexpression,
6183 examining it multiple times could result in a combinatorial explosion.
6184 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6185 with optimization took about 28 minutes to compile -- even though it was
6186 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6187 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6188 we have processed. Note that the only test of top_p was above. */
6197 rtn
= safe_from_p (x
, exp
, 0);
6199 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
6200 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
6205 /* Now look at our tree code and possibly recurse. */
6206 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
6209 exp_rtl
= DECL_RTL_IF_SET (exp
);
6216 if (TREE_CODE (exp
) == TREE_LIST
)
6220 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
6222 exp
= TREE_CHAIN (exp
);
6225 if (TREE_CODE (exp
) != TREE_LIST
)
6226 return safe_from_p (x
, exp
, 0);
6229 else if (TREE_CODE (exp
) == ERROR_MARK
)
6230 return 1; /* An already-visited SAVE_EXPR? */
6236 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
6241 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          /* If the operand is static or we are static, we can't conflict.
             Likewise if we don't conflict with the operand at all.  */
          if (staticp (TREE_OPERAND (exp, 0))
              || TREE_STATIC (exp)
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
            return 1;

          /* Otherwise, the only way this can conflict is if we are taking
             the address of a DECL whose address is part of X, which is
             very rare.  */
          exp = TREE_OPERAND (exp, 0);
          if (DECL_P (exp))
            {
              if (!DECL_RTL_SET_P (exp)
                  || GET_CODE (DECL_RTL (exp)) != MEM)
                return 0;
              else
                exp_rtl = XEXP (DECL_RTL (exp), 0);
            }
          break;

        case INDIRECT_REF:
          if (GET_CODE (x) == MEM
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
                                        get_alias_set (exp)))
            return 0;
          break;

        case CALL_EXPR:
          /* Assume that the call will clobber all hard registers and
             all of memory.  */
          if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
              || GET_CODE (x) == MEM)
            return 0;
          break;

        case RTL_EXPR:
          /* If a sequence exists, we would have to scan every instruction
             in the sequence to see if it was safe.  This is probably not
             worthwhile.  */
          if (RTL_EXPR_SEQUENCE (exp))
            return 0;

          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
          break;

        case CLEANUP_POINT_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

        case SAVE_EXPR:
          exp_rtl = SAVE_EXPR_RTL (exp);
          if (exp_rtl)
            break;

          /* If we've already scanned this, don't do it again.  Otherwise,
             show we've scanned it and record for clearing the flag if we're
             going on.  */
          if (TREE_PRIVATE (exp))
            return 1;

          TREE_PRIVATE (exp) = 1;
          if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
            {
              TREE_PRIVATE (exp) = 0;
              return 0;
            }

          save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
          return 1;

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

        case METHOD_CALL_EXPR:
          /* This takes an rtx argument, but shouldn't appear here.  */
          abort ();

        default:
          break;
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
          return 0;

      /* If this is a language-specific tree code, it may require
         special handling.  */
      if ((unsigned int) TREE_CODE (exp)
          >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
          && !(*lang_hooks.safe_from_p) (x, exp))
        return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (GET_CODE (exp_rtl) == REG
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && true_dependence (exp_rtl, VOIDmode, x,
                                        rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
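/* A typical caller pattern (a sketch, not code from this file): before
   expanding RHS directly into the rtx already holding LHS, do

     if (target != 0 && safe_from_p (target, rhs, 1))
       ... expand RHS into TARGET ...
     else
       ... expand into a fresh temporary and copy ...

   For "x = y + x", safe_from_p returns 0 because the expression reads X,
   so the temporary path must be taken.  */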
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }
}
#endif
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
        return BIGGEST_ALIGNMENT;
      else
        {
          /* Note: tree_low_cst is intentionally not used here,
             we don't care about the upper bits.  */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;
          return c0 ? c0 : BIGGEST_ALIGNMENT;
        }
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
          && host_integerp (TREE_OPERAND (exp, 1), 1))
        {
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
          return MAX (1, c0 / c1);
        }
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR: case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
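/* Worked example: for an index expression (i * 12 + 4) this returns
   MIN (highest_pow2_factor (i * 12), highest_pow2_factor (4))
   = MIN (1 * 4, 4) = 4, since the lowest set bit of 12 contributes a
   factor of 4; a MEM addressed by the expression can therefore be marked
   as (at least) 4-byte aligned.  */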
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
  unsigned HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
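/* Worked example: if EXP above is only provably a multiple of 4 but TYPE
   is aligned to 8 bytes, the result is 8 -- the expression addresses an
   object of TYPE, so it must also satisfy the type's alignment.  */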
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
         see if any object has a type that is a pointer to the type we
         want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
           elt = ((TREE_CODE (elt) == COMPOUND_EXPR
                   || TREE_CODE (elt) == COND_EXPR)
                  ? TREE_OPERAND (elt, 1)
                  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                  ? TREE_OPERAND (elt, 0) : 0))
        if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
          {
            if (plist)
              *plist = placeholder_expr;
            return elt;
          }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
           elt
           = ((TREE_CODE (elt) == COMPOUND_EXPR
               || TREE_CODE (elt) == COND_EXPR)
              ? TREE_OPERAND (elt, 1)
              : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
              ? TREE_OPERAND (elt, 0) : 0))
        if (POINTER_TYPE_P (TREE_TYPE (elt))
            && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                == need_type))
          {
            if (plist)
              *plist = placeholder_expr;
            return build1 (INDIRECT_REF, need_type, elt);
          }
    }

  return 0;
}
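/* For instance (the use described in tree.def): a record type whose field
   sizes refer to the record object itself represents that object as a
   PLACEHOLDER_EXPR; a WITH_RECORD_EXPR pushes the concrete object onto
   placeholder_list, and this function finds the innermost pushed object
   whose type (or pointed-to type) matches.  */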
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural
   mode.  TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend
   insns.  This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.  */
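/* Typical use (a sketch only; EXPAND_NORMAL is the "no special handling"
   modifier):

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
     val = force_reg (TYPE_MODE (TREE_TYPE (exp)), val);

   forces the value of a value-producing EXP into some pseudo register,
   since the rtx returned need not be a REG at all.  */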
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
        return op0;
      else
        return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                 || code == CONVERT_EXPR || code == REFERENCE_EXPR
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
          || code == INDIRECT_REF || code == BUFFER_REF)
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);
      else if (code == BIT_FIELD_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }
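  /* Example: for a statement like "(void) (a[i] + f ());" we reach the
     '2' branch above -- both operands are expanded with const0_rtx as the
     target, so f () is still called for its side effect but no addition
     is ever emitted.  */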
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Labels in containing functions, or labels used from initializers,
           must be forced.  */
        if (modifier == EXPAND_INITIALIZER
            || (function != current_function_decl
                && function != inline_function_decl
                && function != 0))
          temp = force_label_rtx (exp);
        else
          temp = label_rtx (exp);

        temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          (*lang_hooks.mark_addressable) (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr
              = replace_equiv_address (addr,
                                       fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);

          temp = replace_equiv_address (DECL_RTL (exp), addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = replace_equiv_address (DECL_RTL (exp),
                                      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp,
                               (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
            abort ();

          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
          return temp;
        }

      return DECL_RTL (exp);
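      /* Promotion example: on a target whose PROMOTE_MODE widens QImode
         variables to SImode, a "char" local lives in an SImode pseudo,
         and the code above hands back (subreg:QI (reg:SI N) 0) with
         SUBREG_PROMOTED_VAR_P set so later passes know the upper bits
         already hold a valid extension.  */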
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
         which can result in (plus xx (const_int 0)), which can get
         simplified by validate_replace_rtx during virtual register
         instantiation, which can result in unrecognizable insns.
         Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
          && modifier != EXPAND_INITIALIZER)
        temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      return const_vector_from_tree (exp);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
    case STRING_CST:
      temp = output_constant_def (exp, 1);

      /* temp contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (temp, 0))
              || flag_force_addr))
        return replace_equiv_address (temp,
                                      copy_rtx (XEXP (temp, 0)));
      return temp;
    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        location_t saved_loc = input_location;
        input_filename = EXPR_WFL_FILENAME (exp);
        input_line = EXPR_WFL_LINENO (exp);
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_filename, input_line);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
        input_location = saved_loc;
        return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
         initialization function and move it into our context.  */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          /* The following call just exists to abort if the context is
             not of a containing function.  */
          find_function_data (context);

          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp, /*rescan=*/true);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return
            replace_equiv_address (temp,
                                   fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (build_qualified_type (type,
                                                      (TYPE_QUALS (type)
                                                       | TYPE_QUAL_CONST)),
                                3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
                                                save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
              promote_mode (type, mode, &unsignedp, 0);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
          else
            store_expr (TREE_OPERAND (exp, 0), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);

          TREE_USED (exp) = 1;
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression,
         it must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
          return temp;
        }

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0)
        = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
      return temp;
    case PLACEHOLDER_EXPR:
      {
        tree old_list = placeholder_list;
        tree placeholder_expr = 0;

        exp = find_placeholder (exp, &placeholder_expr);
        if (exp == 0)
          abort ();

        placeholder_list = TREE_CHAIN (placeholder_expr);
        temp = expand_expr (exp, original_target, tmode, modifier);
        placeholder_list = old_list;
        return temp;
      }
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
                            modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
        expand_goto (TREE_OPERAND (exp, 0));
      else
        expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
        expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
        sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (2);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (!DECL_RTL_SET_P (vars))
              expand_decl (vars);
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
        {
          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
            abort ();
          emit_insn (RTL_EXPR_SEQUENCE (exp));
          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
        }
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.

         FIXME: Avoid trying to fill vector constructors piece-meal.
         Output them with output_constant_def below unless we're sure
         they're zeros.  This should go away when vector initializers
         are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
                        && (! MOVE_BY_PIECES_P
                            (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
                             TYPE_ALIGN (type)))
                        && ((TREE_CODE (type) == VECTOR_TYPE
                             && !is_zeros_p (exp))
                            || ! mostly_zeros_p (exp)))))
               || ((modifier == EXPAND_INITIALIZER
                    || modifier == EXPAND_CONST_ADDRESS)
                   && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp, 1);

          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM)
            constructor = validize_mem (constructor);

          return constructor;
        }
      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL
              || modifier == EXPAND_STACK_PARM)
            target
              = assign_temp (build_qualified_type (type,
                                                   (TYPE_QUALS (type)
                                                    | (TREE_READONLY (exp)
                                                       * TYPE_QUAL_CONST))),
                             0, TREE_ADDRESSABLE (exp), 1);

          store_constructor (exp, target, 0, int_expr_size (exp));
          return target;
        }
    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree index;
        tree string = string_constant (exp1, &index);

        /* Try to optimize reads from const strings.  */
        if (string
            && TREE_CODE (string) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1
            && modifier != EXPAND_WRITE)
          return gen_int_mode (TREE_STRING_POINTER (string)
                               [TREE_INT_CST_LOW (index)], mode);

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);

        /* If we are writing to this object and its type is a record with
           readonly fields, we must mark it as readonly so it will
           conflict with readonly references to those fields.  */
        if (modifier == EXPAND_WRITE && readonly_fields_p (type))
          RTX_UNCHANGING_P (temp) = 1;

        return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = convert (sizetype, TREE_OPERAND (exp, 1));

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion,  (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

        if (! integer_zerop (low_bound))
          index = size_diffop (index, convert (sizetype, low_bound));

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY
            && TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1)
          return gen_int_mode (TREE_STRING_POINTER (array)
                               [TREE_INT_CST_LOW (index)], mode);

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY
            && TREE_CODE (array) == CONSTRUCTOR
            && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST
            && 0 > compare_tree_int (index,
                                     list_length (CONSTRUCTOR_ELTS
                                                  (TREE_OPERAND (exp, 0)))))
          {
            tree elem;
            HOST_WIDE_INT i;

            for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
                 i = TREE_INT_CST_LOW (index);
                 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
              ;

            if (elem)
              return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
                                  modifier);
          }

        else if (optimize >= 1
                 && modifier != EXPAND_CONST_ADDRESS
                 && modifier != EXPAND_INITIALIZER
                 && modifier != EXPAND_MEMORY
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem;

                    for (elem = CONSTRUCTOR_ELTS (init);
                         (elem
                          && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
                         elem = TREE_CHAIN (elem))
                      ;

                    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && 0 > compare_tree_int (index,
                                                  TREE_STRING_LENGTH (init)))
                  {
                    tree type = TREE_TYPE (TREE_TYPE (init));
                    enum machine_mode mode = TYPE_MODE (type);

                    if (GET_MODE_CLASS (mode) == MODE_INT
                        && GET_MODE_SIZE (mode) == 1)
                      return gen_int_mode (TREE_STRING_POINTER (init)
                                           [TREE_INT_CST_LOW (index)], mode);
                  }
              }
          }
      }
      goto normal_inner_ref;
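      /* Example: when optimizing, "static const int tbl[] = { 2, 3, 5 };"
         indexed as tbl[1] is caught by the DECL_INITIAL case above and
         expands straight to the constant 3; no memory reference is
         emitted.  */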
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    && modifier == EXPAND_STACK_PARM)
                  target = 0;
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    HOST_WIDE_INT bitsize
                      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
                    enum machine_mode imode
                      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

                    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (imode, op0, op1, target);
                      }
                    else
                      {
                        tree count
                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
                                         0);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      goto normal_inner_ref;
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep);
        rtx orig_op0;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to suffice.  This occurs in unchecked conversion in Ada.  */

        orig_op0 = op0
          = expand_expr (tem,
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                              != INTEGER_CST)
                          && modifier != EXPAND_STACK_PARM
                          ? target : NULL_RTX),
                         VOIDmode,
                         (modifier == EXPAND_INITIALIZER
                          || modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_STACK_PARM)
                         ? modifier : EXPAND_NORMAL);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and OFFSET is 0 and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
                && offset == 0)
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
                                          EXPAND_SUM);

            /* If this object is in a register, put it into memory.
               This case can't occur in C, but can in Ada if we have
               unchecked conversion of an expression from a scalar type to
               an array or record type.  */
            if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
              {
                /* If the operand is a SAVE_EXPR, we can deal with this by
                   forcing the SAVE_EXPR into memory.  */
                if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
                  {
                    put_var_into_stack (TREE_OPERAND (exp, 0),
                                        /*rescan=*/true);
                    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
                  }
                else
                  {
                    tree nt
                      = build_qualified_type (TREE_TYPE (tem),
                                              (TYPE_QUALS (TREE_TYPE (tem))
                                               | TYPE_QUAL_CONST));
                    rtx memloc = assign_temp (nt, 1, 1, 1);

                    emit_move_insn (memloc, op0);
                    op0 = memloc;
                  }
              }

            if (GET_CODE (op0) != MEM)
              abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
            if (GET_MODE (offset_rtx) != Pmode)
              offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
            if (GET_MODE (offset_rtx) != ptr_mode)
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

            /* A constant address in OP0 can have VOIDmode, we must not try
               to call force_reg for that case.  Avoid that case.  */
            if (GET_CODE (op0) == MEM
                && GET_MODE (op0) == BLKmode
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
                && bitsize != 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
              {
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }

        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
           record its alignment as BIGGEST_ALIGNMENT.  */
        if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
            && is_aligning_offset (offset, tem))
          set_mem_align (op0, BIGGEST_ALIGNMENT);

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }

        /* The following code doesn't handle CONCAT.
           Assume only bitpos == 0 can be used for CONCAT, due to
           one-element arrays having the same mode as their element.  */
        if (GET_CODE (op0) == CONCAT)
          {
            if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
              abort ();
            return op0;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
                     && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
                    || (bitpos % BITS_PER_UNIT != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (bitsize >= 0
                && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
                    == INTEGER_CST)
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                          bitsize)))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && GET_CODE (op0) == MEM
                      && GET_CODE (target) == MEM
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                if (GET_CODE (op0) != MEM
                    || (target != 0 && GET_CODE (target) != MEM)
                    || bitpos % BITS_PER_UNIT != 0)
                  abort ();

                op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                emit_block_move (target, op0,
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

                return target;
              }

            op0 = validize_mem (op0);

            if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
                                     (modifier == EXPAND_STACK_PARM
                                      ? NULL_RTX : target),
                                     ext_mode, ext_mode,
                                     int_size_in_bytes (TREE_TYPE (tem)));

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);

            if (mode == BLKmode)
              {
                rtx new = assign_temp (build_qualified_type
                                       ((*lang_hooks.types.type_for_mode)
                                        (ext_mode, 0),
                                        TYPE_QUAL_CONST), 0, 1, 1);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                set_mem_attributes (op0, exp, 1);
              }

            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        set_mem_attributes (op0, exp, 0);
        if (GET_CODE (XEXP (op0, 0)) == REG)
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
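    /* Example: reading F from "struct { unsigned a : 7; unsigned f : 9; }"
       has bitpos 7, which is not a multiple of BITS_PER_UNIT, so the
       access cannot be a plain memref and is funneled through
       extract_bit_field above.  */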
    case VTABLE_REF:
      {
        rtx insn, before = get_last_insn (), vtbl_ref;

        /* Evaluate the interior expression.  */
        subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
                                 tmode, modifier);

        /* Get or create an instruction off which to hang a note.  */
        if (REG_P (subtarget))
          {
            target = subtarget;
            insn = get_last_insn ();
            if (insn == before)
              abort ();
            if (! INSN_P (insn))
              insn = prev_nonnote_insn (insn);
          }
        else
          {
            target = gen_reg_rtx (GET_MODE (subtarget));
            insn = emit_move_insn (target, subtarget);
          }

        /* Collect the data for the note.  */
        vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
        vtbl_ref = plus_constant (vtbl_ref,
                                  tree_low_cst (TREE_OPERAND (exp, 2), 0));
        /* Discard the initial CONST that was added.  */
        vtbl_ref = XEXP (vtbl_ref, 0);

        REG_NOTES (insn)
          = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

        return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low%bits_per_word);
               the_word  = set [ (index - rlo)/bits_per_word ];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */

        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;

        /* If domain is empty, answer is no.  Likewise if index is constant
           and out of bounds.  */
        if (((TREE_CODE (set_high_bound) == INTEGER_CST
             && TREE_CODE (set_low_bound) == INTEGER_CST
             && tree_int_cst_lt (set_high_bound, set_low_bound))
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, op1);

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, op1);

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx_MEM (byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }
    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
        {
          WITH_CLEANUP_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
          expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
                                  CLEANUP_EH_ONLY (exp));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 1) = 0;
        }
      return WITH_CLEANUP_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (2);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return (*lang_hooks.expand_expr) (exp, original_target,
                                              tmode, modifier);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attribute.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, exp, 0);
              return result;
            }

          if (target == 0)
            target = assign_temp (type, 0, 1, 1);

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        adjust_address (target, TYPE_MODE (valtype), 0),
                        modifier == EXPAND_STACK_PARM ? 2 : 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target,
                         MIN ((int_size_in_bytes (TREE_TYPE
                                                  (TREE_OPERAND (exp, 0)))
                               * BITS_PER_UNIT),
                              (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                         0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, type, 0);
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        {
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
          enum machine_mode inner_mode = TYPE_MODE (inner_type);

          if (modifier == EXPAND_INITIALIZER)
            return simplify_gen_subreg (mode, op0, inner_mode,
                                        subreg_lowpart_offset (mode,
                                                               inner_mode));
          else
            return convert_modes (mode, inner_mode, op0,
                                  TREE_UNSIGNED (inner_type));
        }

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
         Otherwise, if neither mode is BLKmode and both are integral and within
         a word, we can use gen_lowpart.  If neither is true, make sure the
         operand is in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
        ;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
               && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
               && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
               && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
               && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
        op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

          if (TREE_ADDRESSABLE (exp))
            abort ();

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* At this point, OP0 is in the correct mode.  If the output type is such
         that the operand is known to be aligned, indicate that it is.
         Otherwise, we need only be concerned about alignment for non-BLKmode
         results.  */
      if (GET_CODE (op0) == MEM)
        {
          op0 = copy_rtx (op0);

          if (TYPE_ALIGN_OK (type))
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              HOST_WIDE_INT temp_size
                = MAX (int_size_in_bytes (inner_type),
                       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
                                                    temp_size, 0, type);
              rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

              if (TREE_ADDRESSABLE (exp))
                abort ();

              if (GET_MODE (op0) == BLKmode)
                emit_block_move (new_with_op0_mode, op0,
                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
              else
                emit_move_insn (new_with_op0_mode, op0);

              op0 = new;
            }

          op0 = adjust_address (op0, TYPE_MODE (type), 0);
        }

      return op0;
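      /* Example: a VIEW_CONVERT_EXPR reinterpreting a "float" as a 32-bit
         integer has equal sizes but different mode classes, so neither the
         no-op nor the gen_lowpart branch applies; the value is spilled to
         a stack temporary and re-read as a MEM in the new mode.  */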
    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
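      /* Illustration: when adding X to (Y + 8), the code above first forms
	 X + Y, pulls any constant term out of X as well, folds the
	 constants together, and hands back (X + Y) + 8 with the single
	 constant outermost.  */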
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
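      /* This is what allows an initializer such as

	     static long d = (char *) &b - (char *) &a;

	 to be emitted as a MINUS of two SYMBOL_REFs (or folded via
	 plus_constant when the second operand is a CONST_INT).  */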
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  goto both_summands;
	}

      goto binop2;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* If we knew for certain that this is arithmetic for an array
	     reference, and we knew the bounds of the array, then we could
	     apply the distributive law across (PLUS X C) for constant C.
	     Without such knowledge, we risk overflowing the computation
	     when both X and C are large, but X+C isn't.  */
	  /* ??? Could perhaps special-case EXP being unsigned and C being
	     positive.  In that case we are certain that X+C is no smaller
	     than X and so the transformed expression will overflow iff the
	     original would have.  */

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1))));
	}
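      /* Illustration: for p = &arr[i] with 4-byte elements, the i * 4
	 scaling reaches this point under EXPAND_SUM and is returned as
	 (mult (reg i) (const_int 4)), ready for use inside an indexed
	 address.  */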
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab
	    = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       ? smul_widen_optab : umul_widen_optab);
	  this_optab
	    = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       ? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;

		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
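      /* In other words, something like (int) h1 * (int) h2 with HImode
	 operands can be emitted as a single widening multiply (e.g. a
	 mulhisi3 pattern, where the target provides one) instead of
	 extending both operands to SImode first.  */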
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
	 saving the expensive divide.  If not, combine will rebuild the
	 original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;
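      /* For example, x/y + z/y becomes x*(1/y) + z*(1/y), letting CSE
	 share one reciprocal and so trade two divides for one divide and
	 two multiplies; this is only attempted under
	 -funsafe-math-optimizations, as tested above.  */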
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;
    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS (mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
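      /* The conditional-jump fallback above amounts to

	     target = op0;
	     if (target GE-or-LE op1)	(GE for MAX_EXPR, LE for MIN_EXPR)
	       goto done;
	     target = op1;
	   done:

	 with cse expected to clean up the constant cases.  */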
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CLZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, clz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CTZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ctz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case POPCOUNT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, popcount_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case PARITY_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, parity_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */
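      /* The sequence emitted above for target = (foo != 0) is, in effect,

	     temp = foo;
	     if (temp == 0) goto label;
	     temp = 1;
	   label:

	 which simulates the store-flag when no scc-style instruction is
	 used.  */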
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;
    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
				target, tmode, modifier);
	}
      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

	    if (modifier == EXPAND_STACK_PARM)
	      target = 0;
	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (modifier == EXPAND_STACK_PARM)
	  temp = assign_temp (type, 0, 0, 1);
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && (GET_CODE (original_target) != MEM
		     || TREE_ADDRESSABLE (type)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    tree cond;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? addv_optab : add_optab)
			    : TREE_CODE (binary_op) == MINUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      cond = invert_truthvalue (TREE_OPERAND (exp, 0));
	    else
	      cond = TREE_OPERAND (exp, 0);

	    result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
					   ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	  }
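	/* Concretely: with BRANCH_COST >= 3, X ? A + 4 : A is emitted by
	   the block above as A + ((X != 0) << 2), replacing the branch
	   with a store-flag and a shift.  */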
	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp,
			    modifier == EXPAND_STACK_PARM ? 2 : 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns.  For
	       example A ? throw : E.  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }
    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which lays down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	if (target == 0)
	  {
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* We have already expanded the slot, so don't do
		   it again.  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		SET_DECL_RTL (slot, target);
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot, /*rescan=*/false);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2)
		    = (*lang_hooks.maybe_build_cleanup) (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* We have already expanded the slot, so don't do
		   it again.  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		SET_DECL_RTL (slot, target);
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot, /*rescan=*/true);
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);

	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

	return target;
      }
    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy and that prevents a partial-subsumption
	   which makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = 0;

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      /* If we are taking the address of a constant and are at the
	 top level, we have to use output_constant_def since we can't
	 call force_const_mem at top level.  */
      else if (cfun == 0
	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
		       == 'c')))
	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	     clever and returns a REG when given a MEM.  */
	  op0 = protect_from_queue (op0, 1);

	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		   || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
	    {
	      /* If the operand is a SAVE_EXPR, we can deal with this by
		 forcing the SAVE_EXPR into memory.  */
	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		{
		  put_var_into_stack (TREE_OPERAND (exp, 0),
				      /*rescan=*/true);
		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		}
	      else
		{
		  /* If this object is in a register, it can't be BLKmode.  */
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  rtx memloc = assign_temp (inner_type, 1, 1, 1);

		  if (GET_CODE (op0) == PARALLEL)
		    /* Handle calls that pass values in multiple
		       non-contiguous locations.  The Irix 6 ABI has examples
		       of this.  */
		    emit_group_store (memloc, op0,
				      int_size_in_bytes (inner_type));
		  else
		    emit_move_insn (memloc, op0);

		  op0 = memloc;
		}
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  mark_temp_addr_taken (op0);
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
		  && mode == ptr_mode)
		op0 = convert_memory_address (ptr_mode, op0);
#endif
	      return op0;
	    }

	  /* If OP0 is not aligned at least as much as the type requires, we
	     need to make a temporary, copy OP0 to it, and take the address of
	     the temporary.  We want to use the alignment of the type, not of
	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	     the test for BLKmode means that can't happen.  The test for
	     BLKmode is because we never make mis-aligned MEMs with
	     non-BLKmode.

	     We don't need to do this at all if the machine doesn't have
	     strict alignment.  */
	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  > MEM_ALIGN (op0))
	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx new;

	      if (TYPE_ALIGN_OK (inner_type))
		abort ();

	      if (TREE_ADDRESSABLE (inner_type))
		{
		  /* We can't make a bitwise copy of this object, so fail.  */
		  error ("cannot take the address of an unaligned member");
		  return const0_rtx;
		}

	      new = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		 : int_size_in_bytes (inner_type),
		 1, build_qualified_type (inner_type,
					  (TYPE_QUALS (inner_type)
					   | TYPE_QUAL_CONST)));

	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
			       (modifier == EXPAND_STACK_PARM
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      op0 = new;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr
	  && GET_CODE (op0) != REG
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode,
			    ! unsignedp && flag_trapv
			    && (GET_MODE_CLASS (partmode) == MODE_INT)
			    ? negv_optab : neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }
    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end_cleanup (handler);

	return op0;
      }

    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);

	if (!optimize || unsafe_for_reeval (finally_block) > 1)
	  {
	    /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
	       is not sufficient, so we cannot expand the block twice.
	       So we play games with GOTO_SUBROUTINE_EXPR to let us
	       expand the thing only once.  */
	    /* When not optimizing, we go ahead with this form since
	       (1) user breakpoints operate more predictably without
		   code duplication, and
	       (2) we're not running any of the global optimizers
		   that would explode in time/space with the highly
		   connected CFG created by the indirect branching.  */

	    rtx finally_label = gen_label_rtx ();
	    rtx done_label = gen_label_rtx ();
	    rtx return_link = gen_reg_rtx (Pmode);
	    tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
				  (tree) finally_label, (tree) return_link);
	    TREE_SIDE_EFFECTS (cleanup) = 1;

	    /* Start a new binding layer that will keep track of all cleanup
	       actions to be performed.  */
	    expand_start_bindings (2);
	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, cleanup);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	    emit_jump (done_label);
	    emit_label (finally_label);
	    expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	    emit_indirect_jump (return_link);
	    emit_label (done_label);
	  }
	else
	  {
	    expand_start_bindings (2);
	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, finally_block);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	  }

	return op0;
      }

    case GOTO_SUBROUTINE_EXPR:
      {
	rtx subr = (rtx) TREE_OPERAND (exp, 0);
	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	rtx return_address = gen_label_rtx ();
	emit_move_insn (return_link,
			gen_rtx_LABEL_REF (Pmode, return_address));
	emit_jump (subr);
	emit_label (return_address);
	return const0_rtx;
      }

    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      abort ();

    default:
      return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (offset, exp)
     tree offset;
     tree exp;
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR
	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
	  && (TREE_OPERAND (offset, 0) == exp
	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
		      == TREE_TYPE (exp)))));
}
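/* The tree pattern recognized above is, for instance, the offset in

       (- (HOST_WIDE_INT) &exp) & (2 * BIGGEST_ALIGNMENT - 1)

   Adding such an offset to the address of EXP rounds it up to the next
   multiple of twice BIGGEST_ALIGNMENT, which is why the result is known
   to be aligned more than BIGGEST_ALIGNMENT.  */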
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
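/* For example, given the argument tree for "hello" + 1 (as seen in a
   call such as strlen ("hello" + 1)), this returns the STRING_CST for
   "hello" and sets *PTR_OFFSET to 1.  */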
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     tree exp;
     int post, ignore;
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }
  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0				/* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
	{
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
	{
	  rtx addr = (general_operand (XEXP (op0, 0), mode)
		      ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;

	  op0 = replace_equiv_address (op0, addr);
	  temp = force_reg (GET_MODE (op0), op0);
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
	  enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
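#if 0
  /* Editorial sketch, not original source: the single-bit rewrite
     above, expressed in C for the test (x & 8) != 0 and its EQ form
     (x, ne, and eq are hypothetical names).  */
  {
    unsigned int x = 42;
    int ne = (x >> 3) & 1;      /* (x & 8) != 0, no scc insn needed  */
    int eq = ne ^ 1;            /* (x & 8) == 0: xor the low bit with 1  */
  }
#endif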
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
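/* Editorial illustration, not part of the original source: the
   set/jump/set fallback above has this shape in C, shown for the
   non-inverted case (store_flag_fallback_sketch is a hypothetical
   name):  */
#if 0
static int
store_flag_fallback_sketch (int a, int b)
{
  int target = 1;       /* assume the condition holds  */
  if (a < b)            /* conditional jump over the correction  */
    goto done;
  target = 0;           /* condition failed: store the other value  */
 done:
  return target;
}
#endif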
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
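/* Editorial illustration, not part of the original source: when the
   index is wider than SImode, the code above rebases it to zero and
   range-checks it in the wide mode before truncating.  A C sketch
   (wide_index_sketch is a hypothetical name):  */
#if 0
static int
wide_index_sketch (long long idx, long long minval, long long range)
{
  unsigned long long i = (unsigned long long) (idx - minval);
  if ((unsigned long long) range < i)   /* the LTU branch to default  */
    return -1;                          /* out of range: default label  */
  return (int) i;                       /* now safe to truncate to SImode  */
}
#endif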
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
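/* Editorial illustration, not part of the original source: the single
   unsigned comparison above subsumes both bounds checks, because an
   index below the minimum wraps around to a huge unsigned value.  In C
   (range_check_sketch is a hypothetical name):  */
#if 0
static int
range_check_sketch (int x, int lo, int hi)
{
  /* One GTU compare replaces x < lo || x > hi.  */
  return (unsigned int) (x - lo) > (unsigned int) (hi - lo);
}
#endif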
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
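/* Editorial illustration, not part of the original source: "emulate
   with a pair of DIs" means, e.g., moving a V2DI value as two DImode
   moves.  In C (v2di_move_sketch is a hypothetical name):  */
#if 0
static void
v2di_move_sketch (long long dest[2], const long long src[2])
{
  dest[0] = src[0];     /* first DImode half  */
  dest[1] = src[1];     /* second DImode half  */
}
#endif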
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (exp)
     tree exp;
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"